summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/test
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 01:47:29 +0000
commit0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d (patch)
treea31f07c9bcca9d56ce61e9a1ffd30ef350d513aa /third_party/libwebrtc/test
parentInitial commit. (diff)
downloadfirefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.tar.xz
firefox-esr-0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d.zip
Adding upstream version 115.8.0esr.upstream/115.8.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/test')
-rw-r--r--third_party/libwebrtc/test/BUILD.gn1263
-rw-r--r--third_party/libwebrtc/test/DEPS88
-rw-r--r--third_party/libwebrtc/test/OWNERS7
-rw-r--r--third_party/libwebrtc/test/android/AndroidManifest.xml47
-rw-r--r--third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTest.java26
-rw-r--r--third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTestActivity.java34
-rw-r--r--third_party/libwebrtc/test/audio_decoder_proxy_factory.h99
-rw-r--r--third_party/libwebrtc/test/benchmark_main.cc17
-rw-r--r--third_party/libwebrtc/test/call_config_utils.cc123
-rw-r--r--third_party/libwebrtc/test/call_config_utils.h34
-rw-r--r--third_party/libwebrtc/test/call_config_utils_unittest.cc64
-rw-r--r--third_party/libwebrtc/test/call_test.cc861
-rw-r--r--third_party/libwebrtc/test/call_test.h359
-rw-r--r--third_party/libwebrtc/test/configurable_frame_size_encoder.cc95
-rw-r--r--third_party/libwebrtc/test/configurable_frame_size_encoder.h70
-rw-r--r--third_party/libwebrtc/test/direct_transport.cc145
-rw-r--r--third_party/libwebrtc/test/direct_transport.h86
-rw-r--r--third_party/libwebrtc/test/direct_transport_unittest.cc34
-rw-r--r--third_party/libwebrtc/test/drifting_clock.cc45
-rw-r--r--third_party/libwebrtc/test/drifting_clock.h50
-rw-r--r--third_party/libwebrtc/test/encoder_settings.cc149
-rw-r--r--third_party/libwebrtc/test/encoder_settings.h65
-rw-r--r--third_party/libwebrtc/test/explicit_key_value_config.cc56
-rw-r--r--third_party/libwebrtc/test/explicit_key_value_config.h39
-rw-r--r--third_party/libwebrtc/test/fake_decoder.cc126
-rw-r--r--third_party/libwebrtc/test/fake_decoder.h74
-rw-r--r--third_party/libwebrtc/test/fake_encoded_frame.cc144
-rw-r--r--third_party/libwebrtc/test/fake_encoded_frame.h91
-rw-r--r--third_party/libwebrtc/test/fake_encoder.cc441
-rw-r--r--third_party/libwebrtc/test/fake_encoder.h179
-rw-r--r--third_party/libwebrtc/test/fake_texture_frame.cc51
-rw-r--r--third_party/libwebrtc/test/fake_texture_frame.h44
-rw-r--r--third_party/libwebrtc/test/fake_videorenderer.h28
-rw-r--r--third_party/libwebrtc/test/fake_vp8_decoder.cc93
-rw-r--r--third_party/libwebrtc/test/fake_vp8_decoder.h52
-rw-r--r--third_party/libwebrtc/test/fake_vp8_encoder.cc130
-rw-r--r--third_party/libwebrtc/test/fake_vp8_encoder.h75
-rw-r--r--third_party/libwebrtc/test/fake_vp8_encoder_unittest.cc114
-rw-r--r--third_party/libwebrtc/test/field_trial.cc39
-rw-r--r--third_party/libwebrtc/test/field_trial.h38
-rw-r--r--third_party/libwebrtc/test/frame_forwarder.cc61
-rw-r--r--third_party/libwebrtc/test/frame_forwarder.h54
-rw-r--r--third_party/libwebrtc/test/frame_generator.cc438
-rw-r--r--third_party/libwebrtc/test/frame_generator.h197
-rw-r--r--third_party/libwebrtc/test/frame_generator_capturer.cc323
-rw-r--r--third_party/libwebrtc/test/frame_generator_capturer.h191
-rw-r--r--third_party/libwebrtc/test/frame_generator_capturer_unittest.cc89
-rw-r--r--third_party/libwebrtc/test/frame_generator_unittest.cc284
-rw-r--r--third_party/libwebrtc/test/frame_utils.cc104
-rw-r--r--third_party/libwebrtc/test/frame_utils.h51
-rw-r--r--third_party/libwebrtc/test/function_audio_decoder_factory.h68
-rw-r--r--third_party/libwebrtc/test/fuzzers/BUILD.gn689
-rw-r--r--third_party/libwebrtc/test/fuzzers/DEPS5
-rw-r--r--third_party/libwebrtc/test/fuzzers/OWNERS3
-rw-r--r--third_party/libwebrtc/test/fuzzers/aec3_config_json_fuzzer.cc30
-rw-r--r--third_party/libwebrtc/test/fuzzers/aec3_fuzzer.cc79
-rw-r--r--third_party/libwebrtc/test/fuzzers/agc_fuzzer.cc124
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.cc77
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.h36
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_g722_fuzzer.cc39
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc26
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc61
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc24
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc24
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc56
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_decoder_pcm_fuzzer.cc45
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.cc53
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.h26
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_encoder_opus_fuzzer.cc27
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_processing_configs_fuzzer.cc147
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.cc143
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.h25
-rw-r--r--third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc162
-rw-r--r--third_party/libwebrtc/test/fuzzers/comfort_noise_decoder_fuzzer.cc59
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_fec_config.json151
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_non_interleaved_config.json66
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_single_nal_config.json66
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_config.json29
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_fec_config.json73
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_config.json29
-rw-r--r--third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_fec_config.json79
-rw-r--r--third_party/libwebrtc/test/fuzzers/congestion_controller_feedback_fuzzer.cc52
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/README37
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/aec3-config-json-corpus/default.json1
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-1bin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-2bin0 -> 801699 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-3bin0 -> 801699 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-4bin0 -> 799275 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-0bin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-1bin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-2bin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-3bin0 -> 5341 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-0bin0 -> 136 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-1bin0 -> 118 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-10bin0 -> 51 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-111
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-12bin0 -> 48 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-13bin0 -> 349 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-14bin0 -> 1872 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-15bin0 -> 48 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-16bin0 -> 349 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-2bin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-3bin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-4bin0 -> 2958 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-5bin0 -> 25 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-6bin0 -> 109 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-7bin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-82
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-9bin0 -> 13 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-01
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-21
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-0bin0 -> 10 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-1bin0 -> 127 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/1.mdnsbin0 -> 62 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/10.mdnsbin0 -> 17 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/11.mdnsbin0 -> 129 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/12.mdnsbin0 -> 17 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/13.mdnsbin0 -> 72 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/14.mdnsbin0 -> 650 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/15.mdnsbin0 -> 30 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/16.mdnsbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/17.mdnsbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/18.mdnsbin0 -> 57 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/19.mdnsbin0 -> 144 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/2.mdnsbin0 -> 132 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/20.mdnsbin0 -> 51 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/3.mdnsbin0 -> 106 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/4.mdnsbin0 -> 103 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/5.mdnsbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/6.mdnsbin0 -> 14 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/7.mdnsbin0 -> 252 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/8.mdnsbin0 -> 72 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/9.mdnsbin0 -> 27 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/pseudotcp-corpus/785b96587d0eb44dd5d75b7a886f37e2ac504511bin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/0.rtcpbin0 -> 72 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/1.rtcpbin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/10.rtcpbin0 -> 76 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/11.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/12.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/13.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/14.rtcpbin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/15.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/16.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/17.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/18.rtcpbin0 -> 28 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/19.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/2.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/20.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/21.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/22.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/23.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/24.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/25.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/26.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/27.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/28.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/29.rtcpbin0 -> 56 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/3.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/30.rtcpbin0 -> 28 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/31.rtcpbin0 -> 28 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/32.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/33.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/34.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/35.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/36.rtcpbin0 -> 52 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/37.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/38.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/39.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/4.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/40.rtcpbin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/41.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/42.rtcpbin0 -> 11 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/43.rtcpbin0 -> 36 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/44.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/45.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/46.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/47.rtcp1
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/48.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/49.rtcpbin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/5.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/50.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/51.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/52.rtcpbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/53.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/54.rtcpbin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/55.rtcp0
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/56.rtcpbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/57.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/58.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/59.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/6.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/60.rtcpbin0 -> 28 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/61.rtcpbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/62.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/63.rtcpbin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/64.rtcpbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/65.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/66.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/7.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/8.rtcpbin0 -> 4 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/9.rtcpbin0 -> 16 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-0bin0 -> 12 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-1bin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-2bin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-3bin0 -> 43 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-4bin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-5bin0 -> 261774 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-6bin0 -> 261774 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-7bin0 -> 58 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-8bin0 -> 24 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtp-depacketizer-av1-assemble-frame-corpus/av1-assemble-frame-01
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp8/vp8.rtpdumpbin0 -> 167509 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp9/vp9.rtpdumpbin0 -> 192742 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-ack-sack.binbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data-data.binbin0 -> 396 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data.binbin0 -> 288 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data.binbin0 -> 288 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/data-fragment1.binbin0 -> 1212 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/forward-tsn.binbin0 -> 20 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat-ack.binbin0 -> 56 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat.binbin0 -> 56 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init-ack.binbin0 -> 364 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init.binbin0 -> 104 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/re-config.binbin0 -> 34 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-data.binbin0 -> 672 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-gap-ack-1.binbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/10.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/11.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/12.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/13.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/14.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/15.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/16.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/17.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/18.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/19.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/2.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/20.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/21.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/22.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/23.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/24.sdp8
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/25.sdp8
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/26.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/27.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/28.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/29.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/3.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/30.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/31.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/32.sdp8
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/33.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/34.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/35.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/36.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/37.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/38.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/39.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/4.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/40.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/41.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/42.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/43.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/44.sdp5
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/45.sdp5
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/46.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/47.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/48.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/49.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/5.sdp10
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/50.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/51.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/52.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/53.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/54.sdp7
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/55.sdp8
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/6.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/7.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/8.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/9.sdp9
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-1.sdp58
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-2.sdp43
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-1.sdp85
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-2.sdp73
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.1.sdp41
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.2.sdp41
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-1.sdp52
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-2.sdp40
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-3.sdp13
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-4.sdp11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-5.sdp5
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-6.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-7.sdp12
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-8.sdp64
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-9.sdp66
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/sdp.tokens56
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/01
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/11
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/21
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/31
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/41
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/51
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/0.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/1.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/10.stunbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/11.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/12.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/13.stunbin0 -> 28 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/14.stunbin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/15.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/16.stunbin0 -> 40 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/17.stunbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/2.stunbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/3.stunbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/4.stunbin0 -> 44 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/5.stunbin0 -> 108 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/6.stunbin0 -> 80 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/7.stunbin0 -> 92 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/8.stunbin0 -> 116 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/9.stunbin0 -> 32 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/validator-crash-1.stunbin0 -> 80 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/stun.tokens2
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/video_layers_allocation-corpus/vla-01
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/0cee4d5fd2905dc1fb2979f10a9724265b7075e2bin0 -> 11 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a1c75436e1872a23391d58316d88c45da0fb7682bin0 -> 7 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a8b3fb7be82395c9462684c766841d668dc0029fbin0 -> 8 bytes
-rw-r--r--third_party/libwebrtc/test/fuzzers/dcsctp_packet_fuzzer.cc29
-rw-r--r--third_party/libwebrtc/test/fuzzers/dcsctp_socket_fuzzer.cc28
-rw-r--r--third_party/libwebrtc/test/fuzzers/field_trial_fuzzer.cc26
-rw-r--r--third_party/libwebrtc/test/fuzzers/flexfec_header_reader_fuzzer.cc36
-rw-r--r--third_party/libwebrtc/test/fuzzers/flexfec_receiver_fuzzer.cc71
-rw-r--r--third_party/libwebrtc/test/fuzzers/flexfec_sender_fuzzer.cc67
-rw-r--r--third_party/libwebrtc/test/fuzzers/forward_error_correction_fuzzer.cc119
-rw-r--r--third_party/libwebrtc/test/fuzzers/frame_buffer2_fuzzer.cc116
-rw-r--r--third_party/libwebrtc/test/fuzzers/frame_buffer_fuzzer.cc87
-rw-r--r--third_party/libwebrtc/test/fuzzers/fuzz_data_helper.cc20
-rw-r--r--third_party/libwebrtc/test/fuzzers/fuzz_data_helper.h105
-rw-r--r--third_party/libwebrtc/test/fuzzers/h264_bitstream_parser_fuzzer.cc21
-rw-r--r--third_party/libwebrtc/test/fuzzers/h264_depacketizer_fuzzer.cc19
-rw-r--r--third_party/libwebrtc/test/fuzzers/neteq_rtp_fuzzer.cc184
-rw-r--r--third_party/libwebrtc/test/fuzzers/neteq_signal_fuzzer.cc201
-rw-r--r--third_party/libwebrtc/test/fuzzers/pseudotcp_parser_fuzzer.cc47
-rw-r--r--third_party/libwebrtc/test/fuzzers/residual_echo_detector_fuzzer.cc66
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtcp_receiver_fuzzer.cc52
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc39
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc94
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc154
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_packet_fuzzer.cc182
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_packetizer_av1_fuzzer.cc71
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_video_frame_assembler_fuzzer.cc44
-rw-r--r--third_party/libwebrtc/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc64
-rw-r--r--third_party/libwebrtc/test/fuzzers/sctp_utils_fuzzer.cc31
-rw-r--r--third_party/libwebrtc/test/fuzzers/sdp_integration_fuzzer.cc66
-rw-r--r--third_party/libwebrtc/test/fuzzers/sdp_parser_fuzzer.cc28
-rw-r--r--third_party/libwebrtc/test/fuzzers/ssl_certificate_fuzzer.cc49
-rw-r--r--third_party/libwebrtc/test/fuzzers/string_to_number_fuzzer.cc35
-rw-r--r--third_party/libwebrtc/test/fuzzers/stun_parser_fuzzer.cc29
-rw-r--r--third_party/libwebrtc/test/fuzzers/stun_validator_fuzzer.cc23
-rw-r--r--third_party/libwebrtc/test/fuzzers/turn_unwrap_fuzzer.cc22
-rw-r--r--third_party/libwebrtc/test/fuzzers/ulpfec_generator_fuzzer.cc70
-rw-r--r--third_party/libwebrtc/test/fuzzers/ulpfec_header_reader_fuzzer.cc36
-rw-r--r--third_party/libwebrtc/test/fuzzers/ulpfec_receiver_fuzzer.cc71
-rw-r--r--third_party/libwebrtc/test/fuzzers/utils/BUILD.gn47
-rw-r--r--third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc200
-rw-r--r--third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.h92
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc20
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc17
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp8_replay_fuzzer.cc42
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc20
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp9_encoder_references_fuzzer.cc624
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp9_qp_parser_fuzzer.cc17
-rw-r--r--third_party/libwebrtc/test/fuzzers/vp9_replay_fuzzer.cc41
-rw-r--r--third_party/libwebrtc/test/fuzzers/webrtc_fuzzer_main.cc41
-rw-r--r--third_party/libwebrtc/test/gl/gl_renderer.cc112
-rw-r--r--third_party/libwebrtc/test/gl/gl_renderer.h52
-rw-r--r--third_party/libwebrtc/test/gmock.h20
-rw-r--r--third_party/libwebrtc/test/gtest.h28
-rw-r--r--third_party/libwebrtc/test/ios/Info.plist47
-rw-r--r--third_party/libwebrtc/test/ios/coverage_util_ios.h24
-rw-r--r--third_party/libwebrtc/test/ios/coverage_util_ios.mm42
-rw-r--r--third_party/libwebrtc/test/ios/google_test_runner.mm40
-rw-r--r--third_party/libwebrtc/test/ios/google_test_runner_delegate.h28
-rw-r--r--third_party/libwebrtc/test/ios/test_support.h39
-rw-r--r--third_party/libwebrtc/test/ios/test_support.mm217
-rw-r--r--third_party/libwebrtc/test/layer_filtering_transport.cc186
-rw-r--r--third_party/libwebrtc/test/layer_filtering_transport.h80
-rw-r--r--third_party/libwebrtc/test/linux/glx_renderer.cc175
-rw-r--r--third_party/libwebrtc/test/linux/glx_renderer.h50
-rw-r--r--third_party/libwebrtc/test/linux/video_renderer_linux.cc28
-rw-r--r--third_party/libwebrtc/test/logging/BUILD.gn35
-rw-r--r--third_party/libwebrtc/test/logging/file_log_writer.cc65
-rw-r--r--third_party/libwebrtc/test/logging/file_log_writer.h50
-rw-r--r--third_party/libwebrtc/test/logging/log_writer.cc26
-rw-r--r--third_party/libwebrtc/test/logging/log_writer.h65
-rw-r--r--third_party/libwebrtc/test/logging/memory_log_writer.cc64
-rw-r--r--third_party/libwebrtc/test/logging/memory_log_writer.h40
-rw-r--r--third_party/libwebrtc/test/mac/Info.plist16
-rw-r--r--third_party/libwebrtc/test/mac/run_test.mm73
-rw-r--r--third_party/libwebrtc/test/mac/video_renderer_mac.h40
-rw-r--r--third_party/libwebrtc/test/mac/video_renderer_mac.mm127
-rw-r--r--third_party/libwebrtc/test/mac_capturer.h50
-rw-r--r--third_party/libwebrtc/test/mac_capturer.mm107
-rw-r--r--third_party/libwebrtc/test/mappable_native_buffer.cc185
-rw-r--r--third_party/libwebrtc/test/mappable_native_buffer.h122
-rw-r--r--third_party/libwebrtc/test/mock_audio_decoder.cc20
-rw-r--r--third_party/libwebrtc/test/mock_audio_decoder.h38
-rw-r--r--third_party/libwebrtc/test/mock_audio_decoder_factory.h92
-rw-r--r--third_party/libwebrtc/test/mock_audio_encoder.cc57
-rw-r--r--third_party/libwebrtc/test/mock_audio_encoder.h116
-rw-r--r--third_party/libwebrtc/test/mock_audio_encoder_factory.h100
-rw-r--r--third_party/libwebrtc/test/mock_frame_transformer.h45
-rw-r--r--third_party/libwebrtc/test/mock_transformable_frame.h30
-rw-r--r--third_party/libwebrtc/test/mock_transport.cc18
-rw-r--r--third_party/libwebrtc/test/mock_transport.h33
-rw-r--r--third_party/libwebrtc/test/network/BUILD.gn199
-rw-r--r--third_party/libwebrtc/test/network/OWNERS1
-rw-r--r--third_party/libwebrtc/test/network/cross_traffic.cc322
-rw-r--r--third_party/libwebrtc/test/network/cross_traffic.h174
-rw-r--r--third_party/libwebrtc/test/network/cross_traffic_unittest.cc163
-rw-r--r--third_party/libwebrtc/test/network/emulated_network_manager.cc122
-rw-r--r--third_party/libwebrtc/test/network/emulated_network_manager.h83
-rw-r--r--third_party/libwebrtc/test/network/emulated_turn_server.cc191
-rw-r--r--third_party/libwebrtc/test/network/emulated_turn_server.h98
-rw-r--r--third_party/libwebrtc/test/network/fake_network_socket_server.cc322
-rw-r--r--third_party/libwebrtc/test/network/fake_network_socket_server.h63
-rw-r--r--third_party/libwebrtc/test/network/feedback_generator.cc111
-rw-r--r--third_party/libwebrtc/test/network/feedback_generator.h60
-rw-r--r--third_party/libwebrtc/test/network/feedback_generator_unittest.cc47
-rw-r--r--third_party/libwebrtc/test/network/g3doc/index.md137
-rw-r--r--third_party/libwebrtc/test/network/g3doc/network_emulation_framework.pngbin0 -> 126248 bytes
-rw-r--r--third_party/libwebrtc/test/network/g3doc/network_injection_into_peer_connection.pngbin0 -> 65121 bytes
-rw-r--r--third_party/libwebrtc/test/network/network_emulation.cc767
-rw-r--r--third_party/libwebrtc/test/network/network_emulation.h467
-rw-r--r--third_party/libwebrtc/test/network/network_emulation_manager.cc373
-rw-r--r--third_party/libwebrtc/test/network/network_emulation_manager.h138
-rw-r--r--third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc319
-rw-r--r--third_party/libwebrtc/test/network/network_emulation_unittest.cc676
-rw-r--r--third_party/libwebrtc/test/network/traffic_route.cc91
-rw-r--r--third_party/libwebrtc/test/network/traffic_route.h57
-rw-r--r--third_party/libwebrtc/test/null_platform_renderer.cc22
-rw-r--r--third_party/libwebrtc/test/null_transport.cc26
-rw-r--r--third_party/libwebrtc/test/null_transport.h30
-rw-r--r--third_party/libwebrtc/test/pc/e2e/BUILD.gn573
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc175
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h81
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn573
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc220
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h106
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc598
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc85
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h73
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc160
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc1228
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h197
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc45
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h36
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc209
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h169
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc575
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h157
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc1648
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc52
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h132
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc682
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc172
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h284
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc121
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h100
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc126
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc2204
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h79
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc168
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h101
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue.h168
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc206
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.cc101
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.h94
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection_test.cc152
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc272
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h153
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc403
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h194
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc60
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h34
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc61
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc187
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h104
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc445
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.cc118
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.h56
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc196
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.cc37
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h46
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc57
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc264
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h170
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc162
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h81
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer_helper.cc63
-rw-r--r--third_party/libwebrtc/test/pc/e2e/analyzer_helper.h61
-rw-r--r--third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc151
-rw-r--r--third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.h68
-rw-r--r--third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.cc117
-rw-r--r--third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.h79
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/architecture.md209
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/default_video_quality_analyzer.md197
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/in_test_psnr_plot.pngbin0 -> 39236 bytes
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/index.md224
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/single_process_encoded_image_data_injector.pngbin0 -> 78481 bytes
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/video_quality_analyzer_pipeline.pngbin0 -> 35899 bytes
-rw-r--r--third_party/libwebrtc/test/pc/e2e/g3doc/vp8_simulcast_offer_modification.pngbin0 -> 79641 bytes
-rw-r--r--third_party/libwebrtc/test/pc/e2e/media/media_helper.cc128
-rw-r--r--third_party/libwebrtc/test/pc/e2e/media/media_helper.h58
-rw-r--r--third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h55
-rw-r--r--third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h60
-rw-r--r--third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc183
-rw-r--r--third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.h72
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc536
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc763
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h155
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc1102
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc139
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.cc217
-rw-r--r--third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.h52
-rw-r--r--third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.cc601
-rw-r--r--third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.h146
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc592
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h136
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc150
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_poller.cc78
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_poller.h80
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_poller_test.cc90
-rw-r--r--third_party/libwebrtc/test/pc/e2e/stats_provider.h29
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_activities_executor.cc122
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_activities_executor.h85
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_peer.cc151
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_peer.h188
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_peer_factory.cc374
-rw-r--r--third_party/libwebrtc/test/pc/e2e/test_peer_factory.h84
-rw-r--r--third_party/libwebrtc/test/pc/sctp/BUILD.gn18
-rw-r--r--third_party/libwebrtc/test/pc/sctp/fake_sctp_transport.h79
-rw-r--r--third_party/libwebrtc/test/peer_scenario/BUILD.gn68
-rw-r--r--third_party/libwebrtc/test/peer_scenario/DEPS5
-rw-r--r--third_party/libwebrtc/test/peer_scenario/peer_scenario.cc127
-rw-r--r--third_party/libwebrtc/test/peer_scenario/peer_scenario.h122
-rw-r--r--third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc428
-rw-r--r--third_party/libwebrtc/test/peer_scenario/peer_scenario_client.h179
-rw-r--r--third_party/libwebrtc/test/peer_scenario/scenario_connection.cc242
-rw-r--r--third_party/libwebrtc/test/peer_scenario/scenario_connection.h66
-rw-r--r--third_party/libwebrtc/test/peer_scenario/signaling_route.cc114
-rw-r--r--third_party/libwebrtc/test/peer_scenario/signaling_route.h67
-rw-r--r--third_party/libwebrtc/test/peer_scenario/tests/BUILD.gn30
-rw-r--r--third_party/libwebrtc/test/peer_scenario/tests/peer_scenario_quality_test.cc46
-rw-r--r--third_party/libwebrtc/test/peer_scenario/tests/remote_estimate_test.cc112
-rw-r--r--third_party/libwebrtc/test/peer_scenario/tests/unsignaled_stream_test.cc270
-rw-r--r--third_party/libwebrtc/test/platform_video_capturer.cc37
-rw-r--r--third_party/libwebrtc/test/platform_video_capturer.h29
-rw-r--r--third_party/libwebrtc/test/rtcp_packet_parser.cc112
-rw-r--r--third_party/libwebrtc/test/rtcp_packet_parser.h130
-rw-r--r--third_party/libwebrtc/test/rtp_file_reader.cc691
-rw-r--r--third_party/libwebrtc/test/rtp_file_reader.h51
-rw-r--r--third_party/libwebrtc/test/rtp_file_reader_unittest.cc126
-rw-r--r--third_party/libwebrtc/test/rtp_file_writer.cc114
-rw-r--r--third_party/libwebrtc/test/rtp_file_writer.h32
-rw-r--r--third_party/libwebrtc/test/rtp_file_writer_unittest.cc85
-rw-r--r--third_party/libwebrtc/test/rtp_rtcp_observer.h151
-rw-r--r--third_party/libwebrtc/test/rtp_test_utils_gn/moz.build216
-rw-r--r--third_party/libwebrtc/test/run_loop.cc73
-rw-r--r--third_party/libwebrtc/test/run_loop.h74
-rw-r--r--third_party/libwebrtc/test/run_loop_unittest.cc60
-rw-r--r--third_party/libwebrtc/test/run_test.cc21
-rw-r--r--third_party/libwebrtc/test/run_test.h22
-rw-r--r--third_party/libwebrtc/test/scenario/BUILD.gn200
-rw-r--r--third_party/libwebrtc/test/scenario/OWNERS2
-rw-r--r--third_party/libwebrtc/test/scenario/audio_stream.cc241
-rw-r--r--third_party/libwebrtc/test/scenario/audio_stream.h110
-rw-r--r--third_party/libwebrtc/test/scenario/call_client.cc386
-rw-r--r--third_party/libwebrtc/test/scenario/call_client.h204
-rw-r--r--third_party/libwebrtc/test/scenario/column_printer.cc73
-rw-r--r--third_party/libwebrtc/test/scenario/column_printer.h66
-rw-r--r--third_party/libwebrtc/test/scenario/hardware_codecs.cc52
-rw-r--r--third_party/libwebrtc/test/scenario/hardware_codecs.h24
-rw-r--r--third_party/libwebrtc/test/scenario/network_node.cc144
-rw-r--r--third_party/libwebrtc/test/scenario/network_node.h83
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats.cc47
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats.h108
-rw-r--r--third_party/libwebrtc/test/scenario/performance_stats_unittest.cc27
-rw-r--r--third_party/libwebrtc/test/scenario/probing_test.cc135
-rw-r--r--third_party/libwebrtc/test/scenario/scenario.cc355
-rw-r--r--third_party/libwebrtc/test/scenario/scenario.h189
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_config.cc47
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_config.h231
-rw-r--r--third_party/libwebrtc/test/scenario/scenario_unittest.cc196
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection.cc190
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection.h110
-rw-r--r--third_party/libwebrtc/test/scenario/stats_collection_unittest.cc114
-rw-r--r--third_party/libwebrtc/test/scenario/video_frame_matcher.cc188
-rw-r--r--third_party/libwebrtc/test/scenario/video_frame_matcher.h134
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream.cc636
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream.h138
-rw-r--r--third_party/libwebrtc/test/scenario/video_stream_unittest.cc322
-rw-r--r--third_party/libwebrtc/test/scoped_key_value_config.cc122
-rw-r--r--third_party/libwebrtc/test/scoped_key_value_config.h54
-rw-r--r--third_party/libwebrtc/test/test_flags.cc51
-rw-r--r--third_party/libwebrtc/test/test_flags.h24
-rw-r--r--third_party/libwebrtc/test/test_main.cc73
-rw-r--r--third_party/libwebrtc/test/test_main_lib.cc267
-rw-r--r--third_party/libwebrtc/test/test_main_lib.h43
-rw-r--r--third_party/libwebrtc/test/test_video_capturer.cc107
-rw-r--r--third_party/libwebrtc/test/test_video_capturer.h64
-rw-r--r--third_party/libwebrtc/test/testsupport/DEPS4
-rw-r--r--third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.cc45
-rw-r--r--third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.h49
-rw-r--r--third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer_unittest.cc57
-rw-r--r--third_party/libwebrtc/test/testsupport/file_utils.cc250
-rw-r--r--third_party/libwebrtc/test/testsupport/file_utils.h109
-rw-r--r--third_party/libwebrtc/test/testsupport/file_utils_override.cc170
-rw-r--r--third_party/libwebrtc/test/testsupport/file_utils_override.h57
-rw-r--r--third_party/libwebrtc/test/testsupport/file_utils_unittest.cc277
-rw-r--r--third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.cc114
-rw-r--r--third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.h87
-rw-r--r--third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter_test.cc320
-rw-r--r--third_party/libwebrtc/test/testsupport/frame_reader.h149
-rw-r--r--third_party/libwebrtc/test/testsupport/frame_writer.h104
-rw-r--r--third_party/libwebrtc/test/testsupport/ios_file_utils.h29
-rw-r--r--third_party/libwebrtc/test/testsupport/ios_file_utils.mm61
-rw-r--r--third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc147
-rw-r--r--third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.h87
-rw-r--r--third_party/libwebrtc/test/testsupport/ivf_video_frame_generator_unittest.cc212
-rw-r--r--third_party/libwebrtc/test/testsupport/jpeg_frame_writer.cc88
-rw-r--r--third_party/libwebrtc/test/testsupport/jpeg_frame_writer_ios.cc30
-rw-r--r--third_party/libwebrtc/test/testsupport/mac_file_utils.h24
-rw-r--r--third_party/libwebrtc/test/testsupport/mac_file_utils.mm43
-rw-r--r--third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h40
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test.cc355
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test.h124
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.cc201
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.h24
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_no_protobuf.cc24
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_unittest.cc216
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_result_writer.h58
-rw-r--r--third_party/libwebrtc/test/testsupport/perf_test_unittest.cc205
-rw-r--r--third_party/libwebrtc/test/testsupport/resources_dir_flag.cc21
-rw-r--r--third_party/libwebrtc/test/testsupport/resources_dir_flag.h20
-rw-r--r--third_party/libwebrtc/test/testsupport/rtc_expect_death.h23
-rw-r--r--third_party/libwebrtc/test/testsupport/test_artifacts.cc71
-rw-r--r--third_party/libwebrtc/test/testsupport/test_artifacts.h40
-rw-r--r--third_party/libwebrtc/test/testsupport/test_artifacts_unittest.cc62
-rw-r--r--third_party/libwebrtc/test/testsupport/video_frame_writer.cc111
-rw-r--r--third_party/libwebrtc/test/testsupport/video_frame_writer.h63
-rw-r--r--third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc173
-rw-r--r--third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc92
-rw-r--r--third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc158
-rw-r--r--third_party/libwebrtc/test/testsupport/y4m_frame_writer.cc59
-rw-r--r--third_party/libwebrtc/test/testsupport/y4m_frame_writer_unittest.cc81
-rw-r--r--third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc162
-rw-r--r--third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc146
-rw-r--r--third_party/libwebrtc/test/testsupport/yuv_frame_writer.cc80
-rw-r--r--third_party/libwebrtc/test/testsupport/yuv_frame_writer_unittest.cc73
-rw-r--r--third_party/libwebrtc/test/time_controller/BUILD.gn70
-rw-r--r--third_party/libwebrtc/test/time_controller/external_time_controller.cc134
-rw-r--r--third_party/libwebrtc/test/time_controller/external_time_controller.h69
-rw-r--r--third_party/libwebrtc/test/time_controller/external_time_controller_unittest.cc179
-rw-r--r--third_party/libwebrtc/test/time_controller/real_time_controller.cc66
-rw-r--r--third_party/libwebrtc/test/time_controller/real_time_controller.h41
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_task_queue.cc89
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_task_queue.h65
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_thread.cc118
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_thread.h66
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_time_controller.cc224
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_time_controller.h162
-rw-r--r--third_party/libwebrtc/test/time_controller/simulated_time_controller_unittest.cc149
-rw-r--r--third_party/libwebrtc/test/time_controller/time_controller_conformance_test.cc181
-rw-r--r--third_party/libwebrtc/test/vcm_capturer.cc98
-rw-r--r--third_party/libwebrtc/test/vcm_capturer.h49
-rw-r--r--third_party/libwebrtc/test/video_codec_settings.h68
-rw-r--r--third_party/libwebrtc/test/video_decoder_proxy_factory.h79
-rw-r--r--third_party/libwebrtc/test/video_encoder_nullable_proxy_factory.h45
-rw-r--r--third_party/libwebrtc/test/video_encoder_proxy_factory.h158
-rw-r--r--third_party/libwebrtc/test/video_renderer.cc31
-rw-r--r--third_party/libwebrtc/test/video_renderer.h44
-rw-r--r--third_party/libwebrtc/test/win/d3d_renderer.cc206
-rw-r--r--third_party/libwebrtc/test/win/d3d_renderer.h55
684 files changed, 62289 insertions, 0 deletions
diff --git a/third_party/libwebrtc/test/BUILD.gn b/third_party/libwebrtc/test/BUILD.gn
new file mode 100644
index 0000000000..04a718c411
--- /dev/null
+++ b/third_party/libwebrtc/test/BUILD.gn
@@ -0,0 +1,1263 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/ui.gni")
+import("//third_party/google_benchmark/buildconfig.gni")
+import("../webrtc.gni")
+if (is_android) {
+ import("//build/config/android/rules.gni")
+}
+
+if (!build_with_chromium) {
+ group("test") {
+ testonly = true
+
+ deps = [
+ ":copy_to_file_audio_capturer",
+ ":rtp_test_utils",
+ ":test_common",
+ ":test_renderer",
+ ":test_support",
+ ":video_test_common",
+ ]
+
+ if (rtc_include_tests) {
+ deps += [
+ ":test_main",
+ ":test_support_unittests",
+ "pc/e2e",
+ ]
+ }
+ }
+}
+
+rtc_library("frame_generator_impl") {
+ visibility = [
+ ":*",
+ "../api:create_frame_generator",
+ ]
+ testonly = true
+ sources = [
+ "frame_generator.cc",
+ "frame_generator.h",
+ "testsupport/ivf_video_frame_generator.cc",
+ "testsupport/ivf_video_frame_generator.h",
+ ]
+ deps = [
+ ":frame_utils",
+ "../api:frame_generator_api",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/video:encoded_image",
+ "../api/video:video_frame",
+ "../api/video:video_frame_i010",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:media_constants",
+ "../media:rtc_media_base",
+ "../modules/video_coding:video_codec_interface",
+ "../modules/video_coding:video_coding_utility",
+ "../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_vp8",
+ "../modules/video_coding:webrtc_vp9",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:logging",
+ "../rtc_base:random",
+ "../rtc_base:rtc_event",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/system:file_wrapper",
+ "../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("frame_utils") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "frame_utils.cc",
+ "frame_utils.h",
+ ]
+ deps = [
+ "../api:scoped_refptr",
+ "../api/video:video_frame",
+ ]
+}
+
+rtc_library("video_test_common") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "fake_texture_frame.cc",
+ "fake_texture_frame.h",
+ "fake_videorenderer.h",
+ "frame_forwarder.cc",
+ "frame_forwarder.h",
+ "frame_generator_capturer.cc",
+ "frame_generator_capturer.h",
+ "mappable_native_buffer.cc",
+ "mappable_native_buffer.h",
+ "test_video_capturer.cc",
+ "test_video_capturer.h",
+ "video_codec_settings.h",
+ "video_decoder_proxy_factory.h",
+ "video_encoder_nullable_proxy_factory.h",
+ "video_encoder_proxy_factory.h",
+ ]
+
+ deps = [
+ ":fileutils",
+ ":frame_utils",
+ ":scoped_key_value_config",
+ "../api:array_view",
+ "../api:create_frame_generator",
+ "../api:frame_generator_api",
+ "../api:scoped_refptr",
+ "../api/task_queue",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:logging",
+ "../rtc_base:refcount",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/task_utils:repeating_task",
+ "../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+}
+
+rtc_library("audio_test_common") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "audio_decoder_proxy_factory.h",
+ "function_audio_decoder_factory.h",
+ ]
+ deps = [
+ "../api/audio_codecs:audio_codecs_api",
+ "../rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+}
+
+if (!build_with_mozilla) {
+if (!build_with_chromium) {
+ if (is_mac || is_ios) {
+ rtc_library("video_test_mac") {
+ testonly = true
+ sources = [
+ "mac_capturer.h",
+ "mac_capturer.mm",
+ ]
+ deps = [
+ ":video_test_common",
+ "../api:libjingle_peerconnection_api",
+ "../api:media_stream_interface",
+ "../api:scoped_refptr",
+ "../modules/video_capture:video_capture_module",
+ "../rtc_base:threading",
+ "../sdk:base_objc",
+ "../sdk:native_api",
+ "../sdk:native_video",
+ "../sdk:videocapture_objc",
+ ]
+ }
+ }
+
+ rtc_library("platform_video_capturer") {
+ testonly = true
+ sources = [
+ "platform_video_capturer.cc",
+ "platform_video_capturer.h",
+ ]
+ deps = [ ":video_test_common" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+ if (is_mac || is_ios) {
+ deps += [ ":video_test_mac" ]
+ } else {
+ sources += [
+ "vcm_capturer.cc",
+ "vcm_capturer.h",
+ ]
+ deps += [
+ ":scoped_key_value_config",
+ "../api:scoped_refptr",
+ "../modules/video_capture:video_capture_module",
+ "../rtc_base:checks",
+ "../rtc_base:logging",
+ ]
+ }
+ }
+}
+}
+
+rtc_library("rtp_test_utils") {
+ if (build_with_mozilla) {
+ sources = []
+ } else {
+ testonly = true
+ sources = [
+ "rtcp_packet_parser.cc",
+ "rtcp_packet_parser.h",
+ "rtp_file_reader.cc",
+ "rtp_file_reader.h",
+ "rtp_file_writer.cc",
+ "rtp_file_writer.h",
+ ]
+ }
+
+ deps = [
+ "../api:array_view",
+ "../api:rtp_parameters",
+ "../modules/rtp_rtcp",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:logging",
+ "../rtc_base:macromagic",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/system:arch",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("field_trial") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "field_trial.cc",
+ "field_trial.h",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ deps = [
+ "../rtc_base:checks",
+ "../system_wrappers:field_trial",
+ ]
+}
+
+rtc_library("explicit_key_value_config") {
+ sources = [
+ "explicit_key_value_config.cc",
+ "explicit_key_value_config.h",
+ ]
+
+ deps = [
+ "../api:field_trials_registry",
+ "../rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
+rtc_library("scoped_key_value_config") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "scoped_key_value_config.cc",
+ "scoped_key_value_config.h",
+ ]
+
+ deps = [
+ ":field_trial",
+ "../api:field_trials_registry",
+ "../rtc_base:checks",
+ "../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
+rtc_library("perf_test") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "testsupport/perf_test.cc",
+ "testsupport/perf_test.h",
+ "testsupport/perf_test_histogram_writer.h",
+ "testsupport/perf_test_result_writer.h",
+ ]
+ deps = [
+ "../api:array_view",
+ "../api/numerics",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:logging",
+ "../rtc_base:rtc_numerics",
+ "../rtc_base:stringutils",
+ "../rtc_base/synchronization:mutex",
+ "../test:fileutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ if (rtc_enable_protobuf) {
+ sources += [ "testsupport/perf_test_histogram_writer.cc" ]
+ deps += [
+ "//third_party/catapult/tracing/tracing:histogram",
+ "//third_party/catapult/tracing/tracing:reserved_infos",
+ ]
+ } else {
+ sources += [ "testsupport/perf_test_histogram_writer_no_protobuf.cc" ]
+ }
+}
+
+if (is_ios) {
+ rtc_library("test_support_objc") {
+ testonly = true
+ visibility = [
+ ":google_test_runner_objc",
+ ":test_support",
+ ]
+ sources = [
+ "ios/coverage_util_ios.h",
+ "ios/coverage_util_ios.mm",
+ "ios/google_test_runner_delegate.h",
+ "ios/test_support.h",
+ "ios/test_support.mm",
+ ]
+ deps = [
+ ":perf_test",
+ "../api/test/metrics:chrome_perf_dashboard_metrics_exporter",
+ "../api/test/metrics:global_metrics_logger_and_exporter",
+ "../api/test/metrics:metrics_exporter",
+ "../api/test/metrics:metrics_set_proto_file_exporter",
+ "../api/test/metrics:print_result_proxy_metrics_exporter",
+ "../api/test/metrics:stdout_metrics_exporter",
+ "../sdk:helpers_objc",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ configs += [ ":test_support_objc_config" ]
+ }
+
+ rtc_library("google_test_runner_objc") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [ "ios/google_test_runner.mm" ]
+ deps = [ ":test_support_objc" ]
+ configs += [ "//build/config/ios:xctest_config" ]
+ frameworks = [ "UIKit.framework" ]
+ }
+
+ config("test_support_objc_config") {
+ defines = []
+
+ if (use_clang_coverage) {
+ defines += [ "WEBRTC_IOS_ENABLE_COVERAGE" ]
+ }
+ }
+}
+
+config("suppress_warning_4373") {
+ if (is_win) {
+ cflags = [
+ # MSVC has a bug which generates this warning when using mocks; see the
+      # section on warning 4373 in the googlemock FAQ. This warning isn't in
+      # the least relevant for us, anyway.
+ "/wd4373",
+ ]
+ }
+}
+
+config("test_main_direct_config") {
+ visibility = [ ":*" ]
+ defines = [ "WEBRTC_UNIT_TEST" ]
+}
+rtc_source_set("test_support") {
+ visibility = [ "*" ]
+ testonly = true
+
+ all_dependent_configs = [
+ ":suppress_warning_4373",
+ "//third_party/googletest:gmock_config",
+ "//third_party/googletest:gtest_config",
+ ]
+
+ sources = [
+ "gmock.h",
+ "gtest.h",
+ ]
+
+ public_deps = [] # no-presubmit-check TODO(webrtc:8603)
+ if (is_ios) {
+ public_deps += # no-presubmit-check TODO(webrtc:8603)
+ [ ":test_support_objc" ]
+ }
+
+ public_configs = [ ":test_main_direct_config" ]
+ deps = [
+ "../rtc_base:ignore_wundef",
+ "//testing/gmock",
+ "//testing/gtest",
+ ]
+}
+
+rtc_library("fixed_fps_video_frame_writer_adapter") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "testsupport/fixed_fps_video_frame_writer_adapter.cc",
+ "testsupport/fixed_fps_video_frame_writer_adapter.h",
+ ]
+ deps = [
+ ":video_test_support",
+ "../api/test/video:video_frame_writer",
+ "../api/units:time_delta",
+ "../api/video:video_frame",
+ "../rtc_base:checks",
+ "../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("video_test_support") {
+ testonly = true
+
+ sources = [
+ "testsupport/frame_reader.h",
+ "testsupport/frame_writer.h",
+ "testsupport/mock/mock_frame_reader.h",
+ "testsupport/video_frame_writer.cc",
+ "testsupport/video_frame_writer.h",
+ "testsupport/y4m_frame_reader.cc",
+ "testsupport/y4m_frame_writer.cc",
+ "testsupport/yuv_frame_reader.cc",
+ "testsupport/yuv_frame_writer.cc",
+ ]
+
+ deps = [
+ ":fileutils",
+ ":frame_utils",
+ ":test_support",
+ ":video_test_common",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/test/video:video_frame_writer",
+ "../api/video:encoded_image",
+ "../api/video:resolution",
+ "../api/video:video_frame",
+ "../api/video_codecs:video_codecs_api",
+ "../common_video",
+ "../media:rtc_media_base",
+ "../modules/video_coding:video_codec_interface",
+ "../modules/video_coding:video_coding_utility",
+ "../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_vp8",
+ "../modules/video_coding:webrtc_vp9",
+ "../rtc_base:buffer",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:logging",
+ "../rtc_base:rtc_event",
+ "../rtc_base:stringutils",
+ "../rtc_base/system:file_wrapper",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+
+ if (!is_ios) {
+ if (!build_with_mozilla) {
+ deps += [ "//third_party:jpeg" ]
+ }
+ sources += [ "testsupport/jpeg_frame_writer.cc" ]
+ } else {
+ sources += [ "testsupport/jpeg_frame_writer_ios.cc" ]
+ }
+}
+
+if (rtc_include_tests && enable_google_benchmarks) {
+ rtc_library("benchmark_main") {
+ testonly = true
+ sources = [ "benchmark_main.cc" ]
+
+ deps = [ "//third_party/google_benchmark" ]
+ }
+}
+
+if (rtc_include_tests && !build_with_chromium) {
+ rtc_library("resources_dir_flag") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "testsupport/resources_dir_flag.cc",
+ "testsupport/resources_dir_flag.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
+ }
+
+ rtc_library("test_flags") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_flags.cc",
+ "test_flags.h",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ]
+ }
+
+ rtc_library("test_main_lib") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "test_main_lib.cc",
+ "test_main_lib.h",
+ ]
+
+ deps = [
+ ":field_trial",
+ ":perf_test",
+ ":resources_dir_flag",
+ ":test_flags",
+ ":test_support",
+ "../api/test/metrics:chrome_perf_dashboard_metrics_exporter",
+ "../api/test/metrics:global_metrics_logger_and_exporter",
+ "../api/test/metrics:metrics_exporter",
+ "../api/test/metrics:metrics_set_proto_file_exporter",
+ "../api/test/metrics:print_result_proxy_metrics_exporter",
+ "../api/test/metrics:stdout_metrics_exporter",
+ "../rtc_base:checks",
+ "../rtc_base:event_tracer",
+ "../rtc_base:logging",
+ "../rtc_base:ssl",
+ "../rtc_base:threading",
+ "../system_wrappers:field_trial",
+ "../system_wrappers:metrics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+
+ if (is_win) {
+ deps += [ "../rtc_base:win32_socket_init" ]
+ }
+ }
+
+ rtc_library("test_main") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "test_main.cc" ]
+
+ deps = [
+ ":test_main_lib",
+ ":test_support",
+ ]
+
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/debugging:failure_signal_handler",
+ "//third_party/abseil-cpp/absl/debugging:symbolize",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ ]
+ }
+
+ rtc_library("test_support_test_artifacts") {
+ testonly = true
+ sources = [
+ "testsupport/test_artifacts.cc",
+ "testsupport/test_artifacts.h",
+ ]
+ deps = [
+ ":fileutils",
+ "../rtc_base:logging",
+ "../rtc_base/system:file_wrapper",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/flags:parse",
+ ]
+ }
+
+ test_support_unittests_resources = [
+ "../resources/foreman_cif_short.yuv",
+ "../resources/video_coding/frame-ethernet-ii.pcap",
+ "../resources/video_coding/frame-loopback.pcap",
+ "../resources/video_coding/pltype103.rtp",
+ "../resources/video_coding/pltype103_header_only.rtp",
+ "../resources/video_coding/ssrcs-2.pcap",
+ "../resources/video_coding/ssrcs-3.pcap",
+ ]
+
+ if (is_ios) {
+ bundle_data("test_support_unittests_bundle_data") {
+ testonly = true
+ sources = test_support_unittests_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("fixed_fps_video_frame_writer_adapter_test") {
+ testonly = true
+ sources = [ "testsupport/fixed_fps_video_frame_writer_adapter_test.cc" ]
+ deps = [
+ ":fixed_fps_video_frame_writer_adapter",
+ ":test_support",
+ ":video_test_support",
+ "../api/units:time_delta",
+ "../api/units:timestamp",
+ "../api/video:video_frame",
+ "../rtc_base/synchronization:mutex",
+ "time_controller",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_test("test_support_unittests") {
+ deps = [
+ ":call_config_utils",
+ ":copy_to_file_audio_capturer_unittest",
+ ":direct_transport",
+ ":fake_video_codecs",
+ ":fileutils",
+ ":fileutils_unittests",
+ ":fixed_fps_video_frame_writer_adapter_test",
+ ":frame_generator_impl",
+ ":perf_test",
+ ":rtc_expect_death",
+ ":rtp_test_utils",
+ ":run_loop",
+ ":scoped_key_value_config",
+ ":test_main",
+ ":test_support",
+ ":test_support_test_artifacts",
+ ":video_test_common",
+ ":video_test_support",
+ "../api:array_view",
+ "../api:create_frame_generator",
+ "../api:create_simulcast_test_fixture_api",
+ "../api:frame_generator_api",
+ "../api:scoped_refptr",
+ "../api:simulcast_test_fixture_api",
+ "../api/task_queue:task_queue_test",
+ "../api/test/video:function_video_factory",
+ "../api/test/video:video_frame_writer",
+ "../api/units:time_delta",
+ "../api/video:encoded_image",
+ "../api/video:video_frame",
+ "../api/video_codecs:video_codecs_api",
+ "../call:video_stream_api",
+ "../common_video",
+ "../media:codec",
+ "../media:media_constants",
+ "../media:rtc_media_base",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../modules/video_coding:simulcast_test_fixture_impl",
+ "../modules/video_coding:video_codec_interface",
+ "../modules/video_coding:video_coding_utility",
+ "../modules/video_coding:webrtc_h264",
+ "../modules/video_coding:webrtc_vp8",
+ "../modules/video_coding:webrtc_vp9",
+ "../rtc_base:criticalsection",
+ "../rtc_base:rtc_event",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/system:file_wrapper",
+ "pc/e2e:e2e_unittests",
+ "pc/e2e/analyzer/video:video_analyzer_unittests",
+ "peer_scenario/tests",
+ "scenario:scenario_unittests",
+ "time_controller:time_controller",
+ "time_controller:time_controller_unittests",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ sources = [
+ "call_config_utils_unittest.cc",
+ "direct_transport_unittest.cc",
+ "fake_vp8_encoder_unittest.cc",
+ "frame_generator_capturer_unittest.cc",
+ "frame_generator_unittest.cc",
+ "rtp_file_reader_unittest.cc",
+ "rtp_file_writer_unittest.cc",
+ "run_loop_unittest.cc",
+ "testsupport/ivf_video_frame_generator_unittest.cc",
+ "testsupport/perf_test_unittest.cc",
+ "testsupport/test_artifacts_unittest.cc",
+ "testsupport/video_frame_writer_unittest.cc",
+ "testsupport/y4m_frame_reader_unittest.cc",
+ "testsupport/y4m_frame_writer_unittest.cc",
+ "testsupport/yuv_frame_reader_unittest.cc",
+ "testsupport/yuv_frame_writer_unittest.cc",
+ ]
+
+ if (rtc_enable_protobuf) {
+ sources += [ "testsupport/perf_test_histogram_writer_unittest.cc" ]
+ deps += [ "//third_party/catapult/tracing/tracing:histogram" ]
+ }
+
+ data = test_support_unittests_resources
+ if (is_android) {
+ deps += [ "//testing/android/native_test:native_test_support" ]
+ shard_timeout = 900
+ }
+
+ if (is_ios) {
+ deps += [ ":test_support_unittests_bundle_data" ]
+ }
+
+ if (!is_android) {
+ # This is needed in order to avoid:
+ # undefined symbol: webrtc::videocapturemodule::VideoCaptureImpl::Create
+ deps += [ "../modules/video_capture:video_capture_internal_impl" ]
+ }
+ }
+}
+
+if (is_ios) {
+ rtc_library("fileutils_ios_objc") {
+ visibility = [
+ ":fileutils",
+ ":fileutils_override_impl",
+ ]
+ sources = [
+ "testsupport/ios_file_utils.h",
+ "testsupport/ios_file_utils.mm",
+ ]
+ deps = [
+ "../rtc_base:checks",
+ "../sdk:helpers_objc",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+ }
+}
+
+if (is_mac) {
+ rtc_library("fileutils_mac_objc") {
+ visibility = [
+ ":fileutils",
+ ":fileutils_override_impl",
+ ]
+ sources = [
+ "testsupport/mac_file_utils.h",
+ "testsupport/mac_file_utils.mm",
+ ]
+ deps = [ "../rtc_base:checks" ]
+ }
+}
+
+rtc_library("fileutils") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "testsupport/file_utils.cc",
+ "testsupport/file_utils.h",
+ ]
+ deps = [
+ ":fileutils_override_api",
+ ":fileutils_override_impl",
+ "../rtc_base:checks",
+ "../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ if (is_ios) {
+ deps += [ ":fileutils_ios_objc" ]
+ }
+ if (is_mac) {
+ deps += [ ":fileutils_mac_objc" ]
+ }
+ if (is_win) {
+ deps += [ "../rtc_base:win32" ]
+ }
+}
+
+# We separate header into own target to make it possible for downstream
+# projects to override implementation.
+rtc_source_set("fileutils_override_api") {
+ testonly = true
+ sources = [ "testsupport/file_utils_override.h" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ]
+}
+
+rtc_library("fileutils_override_impl") {
+ testonly = true
+ visibility = [ ":fileutils" ]
+ sources = [ "testsupport/file_utils_override.cc" ]
+ deps = [
+ ":fileutils_override_api",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ if (is_ios) {
+ deps += [ ":fileutils_ios_objc" ]
+ }
+ if (is_mac) {
+ deps += [ ":fileutils_mac_objc" ]
+ }
+ if (is_win) {
+ deps += [ "../rtc_base:win32" ]
+ }
+}
+
+rtc_source_set("run_test") {
+ testonly = true
+ if (is_mac) {
+ public_deps = [ ":run_test_objc" ] # no-presubmit-check TODO(webrtc:8603)
+ } else {
+ public_deps = # no-presubmit-check TODO(webrtc:8603)
+ [ ":run_test_generic" ]
+ }
+}
+
+rtc_source_set("run_test_interface") {
+ sources = [ "run_test.h" ]
+}
+
+if (is_mac) {
+ rtc_library("run_test_objc") {
+ testonly = true
+ visibility = [ ":run_test" ]
+ sources = [ "mac/run_test.mm" ]
+ deps = [ ":run_test_interface" ]
+ }
+}
+
+rtc_library("run_test_generic") {
+ testonly = true
+ visibility = [ ":run_test" ]
+ sources = [ "run_test.cc" ]
+ deps = [ ":run_test_interface" ]
+}
+
+rtc_library("fileutils_unittests") {
+ testonly = true
+ visibility = [ ":*" ] # Only targets in this file can depend on this.
+ sources = [ "testsupport/file_utils_unittest.cc" ]
+ deps = [
+ ":fileutils",
+ ":test_support",
+ "../rtc_base:checks",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("direct_transport") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "direct_transport.cc",
+ "direct_transport.h",
+ ]
+ deps = [
+ "../api:rtp_parameters",
+ "../api:sequence_checker",
+ "../api:simulated_network_api",
+ "../api:transport_api",
+ "../api/task_queue",
+ "../api/units:time_delta",
+ "../call:call_interfaces",
+ "../call:simulated_packet_receiver",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../rtc_base:checks",
+ "../rtc_base:macromagic",
+ "../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
+ "../rtc_base/task_utils:repeating_task",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+ public_deps = # no-presubmit-check TODO(webrtc:8603)
+ [ "../call:fake_network" ]
+}
+
+rtc_library("fake_video_codecs") {
+ allow_poison = [ "software_video_codecs" ]
+ visibility = [ "*" ]
+ sources = [
+ "configurable_frame_size_encoder.cc",
+ "configurable_frame_size_encoder.h",
+ "fake_decoder.cc",
+ "fake_decoder.h",
+ "fake_encoder.cc",
+ "fake_encoder.h",
+ "fake_vp8_decoder.cc",
+ "fake_vp8_decoder.h",
+ "fake_vp8_encoder.cc",
+ "fake_vp8_encoder.h",
+ ]
+ deps = [
+ "../api:fec_controller_api",
+ "../api:scoped_refptr",
+ "../api:sequence_checker",
+ "../api/task_queue",
+ "../api/video:encoded_image",
+ "../api/video:video_bitrate_allocation",
+ "../api/video:video_frame",
+ "../api/video:video_rtp_headers",
+ "../api/video_codecs:video_codecs_api",
+ "../api/video_codecs:vp8_temporal_layers_factory",
+ "../modules/video_coding:codec_globals_headers",
+ "../modules/video_coding:video_codec_interface",
+ "../modules/video_coding:video_coding_utility",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:macromagic",
+ "../rtc_base:rtc_task_queue",
+ "../rtc_base:timeutils",
+ "../rtc_base/synchronization:mutex",
+ "../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("null_transport") {
+ testonly = true
+ sources = [
+ "null_transport.cc",
+ "null_transport.h",
+ ]
+ deps = [ "../api:transport_api" ]
+}
+
+rtc_library("encoder_settings") {
+ testonly = true
+ sources = [
+ "encoder_settings.cc",
+ "encoder_settings.h",
+ ]
+ deps = [
+ "../api:scoped_refptr",
+ "../api/video_codecs:video_codecs_api",
+ "../call:rtp_interfaces",
+ "../call:video_stream_api",
+ "../rtc_base:checks",
+ "../rtc_base:refcount",
+ "../video/config:encoder_config",
+ ]
+}
+
+rtc_library("rtc_expect_death") {
+ testonly = true
+ sources = [ "testsupport/rtc_expect_death.h" ]
+ deps = [ ":test_support" ]
+}
+
+rtc_library("run_loop") {
+ testonly = true
+ sources = [
+ "run_loop.cc",
+ "run_loop.h",
+ ]
+ deps = [
+ "../api/task_queue",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ]
+}
+
+rtc_library("test_common") {
+ testonly = true
+ sources = [
+ "call_test.cc",
+ "call_test.h",
+ "drifting_clock.cc",
+ "drifting_clock.h",
+ "layer_filtering_transport.cc",
+ "layer_filtering_transport.h",
+ "rtp_rtcp_observer.h",
+ ]
+
+ deps = [
+ ":direct_transport",
+ ":encoder_settings",
+ ":fake_video_codecs",
+ ":fileutils",
+ ":mock_transport",
+ ":run_loop",
+ ":scoped_key_value_config",
+ ":test_support",
+ ":video_test_common",
+ "../api:array_view",
+ "../api:create_frame_generator",
+ "../api:frame_generator_api",
+ "../api:rtp_headers",
+ "../api:rtp_parameters",
+ "../api:simulated_network_api",
+ "../api:transport_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ "../api/audio_codecs:builtin_audio_encoder_factory",
+ "../api/rtc_event_log",
+ "../api/task_queue",
+ "../api/task_queue:default_task_queue_factory",
+ "../api/test/video:function_video_factory",
+ "../api/transport:field_trial_based_config",
+ "../api/units:time_delta",
+ "../api/video:builtin_video_bitrate_allocator_factory",
+ "../api/video:video_bitrate_allocator_factory",
+ "../api/video:video_frame",
+ "../api/video_codecs:video_codecs_api",
+ "../call",
+ "../call:call_interfaces",
+ "../call:fake_network",
+ "../call:simulated_network",
+ "../call:simulated_packet_receiver",
+ "../call:video_stream_api",
+ "../modules/audio_device:audio_device_impl",
+ "../modules/audio_mixer:audio_mixer_impl",
+ "../modules/rtp_rtcp",
+ "../modules/rtp_rtcp:rtp_rtcp_format",
+ "../modules/rtp_rtcp:rtp_video_header",
+ "../modules/video_coding:codec_globals_headers",
+ "../rtc_base:checks",
+ "../rtc_base:criticalsection",
+ "../rtc_base:rtc_event",
+ "../rtc_base:task_queue_for_test",
+ "../rtc_base:threading",
+ "../rtc_base:timeutils",
+ "../system_wrappers",
+ "../system_wrappers:field_trial",
+ "../video/config:encoder_config",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ if (!is_android && !build_with_chromium) {
+ deps += [ "../modules/video_capture:video_capture_internal_impl" ]
+ }
+  # This, or some form of it, should be upstreamed.
+ if (!rtc_include_tests) {
+ deps -= [ "../rtc_base:task_queue_for_test" ]
+ }
+}
+
+rtc_library("mock_transport") {
+ testonly = true
+ sources = [
+ "mock_transport.cc",
+ "mock_transport.h",
+ ]
+ deps = [
+ ":test_support",
+ "../api:transport_api",
+ ]
+}
+
+rtc_source_set("test_renderer") {
+ public_deps = # no-presubmit-check TODO(webrtc:8603)
+ [ ":test_renderer_generic" ]
+ testonly = true
+ if (is_mac) {
+ public_deps += # no-presubmit-check TODO(webrtc:8603)
+ [ ":test_renderer_objc" ]
+ }
+}
+
+rtc_library("mock_frame_transformer") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [ "mock_frame_transformer.h" ]
+ deps = [
+ "../api:frame_transformer_interface",
+ "../test:test_support",
+ ]
+}
+
+rtc_library("mock_transformable_frame") {
+ visibility = [ "*" ]
+
+ testonly = true
+ sources = [ "mock_transformable_frame.h" ]
+
+ deps = [
+ "../api:frame_transformer_interface",
+ "../test:test_support",
+ ]
+}
+
+if (is_mac) {
+ rtc_library("test_renderer_objc") {
+ testonly = true
+ visibility = [ ":test_renderer" ]
+ sources = [
+ "mac/video_renderer_mac.h",
+ "mac/video_renderer_mac.mm",
+ ]
+ deps = [ ":test_renderer_generic" ]
+ frameworks = [
+ "Cocoa.framework",
+ "OpenGL.framework",
+ "CoreVideo.framework",
+ ]
+ defines = [ "GL_SILENCE_DEPRECATION" ]
+ }
+}
+
+rtc_library("test_renderer_generic") {
+ testonly = true
+ visibility = [
+ ":test_renderer",
+ ":test_renderer_objc",
+ ]
+ libs = []
+ sources = [
+ "video_renderer.cc",
+ "video_renderer.h",
+ ]
+ deps = [
+ "../api/video:video_frame",
+ "../common_video",
+ "../rtc_base:checks",
+ ]
+ if (is_win) {
+ sources += [
+ "win/d3d_renderer.cc",
+ "win/d3d_renderer.h",
+ ]
+ deps += [ "../api:scoped_refptr" ]
+ }
+ if (!((is_linux || is_chromeos) && rtc_use_x11) && !is_mac && !is_win) {
+ sources += [ "null_platform_renderer.cc" ]
+ }
+ if (((is_linux || is_chromeos) && rtc_use_x11) || is_mac) {
+ sources += [
+ "gl/gl_renderer.cc",
+ "gl/gl_renderer.h",
+ ]
+ }
+ if (is_mac) {
+ defines = [ "GL_SILENCE_DEPRECATION" ]
+ }
+
+ if ((is_linux || is_chromeos) && rtc_use_x11) {
+ sources += [
+ "linux/glx_renderer.cc",
+ "linux/glx_renderer.h",
+ "linux/video_renderer_linux.cc",
+ ]
+ libs += [
+ "Xext",
+ "X11",
+ "GL",
+ ]
+ }
+ if (is_android) {
+ libs += [
+ "GLESv2",
+ "log",
+ ]
+ }
+}
+
+rtc_library("audio_codec_mocks") {
+ testonly = true
+ sources = [
+ "mock_audio_decoder.cc",
+ "mock_audio_decoder.h",
+ "mock_audio_decoder_factory.h",
+ "mock_audio_encoder.cc",
+ "mock_audio_encoder.h",
+ "mock_audio_encoder_factory.h",
+ ]
+
+ deps = [
+ ":test_support",
+ "../api:array_view",
+ "../api:make_ref_counted",
+ "../api:scoped_refptr",
+ "../api/audio_codecs:audio_codecs_api",
+ "../api/audio_codecs:builtin_audio_decoder_factory",
+ ]
+}
+
+rtc_library("copy_to_file_audio_capturer") {
+ testonly = true
+ sources = [
+ "testsupport/copy_to_file_audio_capturer.cc",
+ "testsupport/copy_to_file_audio_capturer.h",
+ ]
+ deps = [
+ "../api:array_view",
+ "../common_audio",
+ "../modules/audio_device:audio_device_impl",
+ "../rtc_base:buffer",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("copy_to_file_audio_capturer_unittest") {
+ testonly = true
+ sources = [ "testsupport/copy_to_file_audio_capturer_unittest.cc" ]
+ deps = [
+ ":copy_to_file_audio_capturer",
+ ":fileutils",
+ ":test_support",
+ "../modules/audio_device:audio_device_impl",
+ ]
+}
+
+if (!build_with_mozilla) {
+if (!build_with_chromium && is_android) {
+ rtc_android_library("native_test_java") {
+ testonly = true
+ sources = [
+ "android/org/webrtc/native_test/RTCNativeUnitTest.java",
+ "android/org/webrtc/native_test/RTCNativeUnitTestActivity.java",
+ ]
+ deps = [
+ "../rtc_base:base_java",
+ "//testing/android/native_test:native_test_java",
+ ]
+ }
+}
+}
+
+rtc_library("call_config_utils") {
+ # TODO(bugs.webrtc.org/10814): Remove rtc_json_suppressions as soon as it
+ # gets removed upstream.
+ public_configs = [ "../rtc_base:rtc_json_suppressions" ]
+ sources = [
+ "call_config_utils.cc",
+ "call_config_utils.h",
+ ]
+ deps = [
+ "../call:video_stream_api",
+ "../rtc_base:rtc_json",
+ ]
+}
+
+rtc_library("fake_encoded_frame") {
+ testonly = true
+ sources = [
+ "fake_encoded_frame.cc",
+ "fake_encoded_frame.h",
+ ]
+ deps = [
+ ":test_support",
+ "../api:rtp_packet_info",
+ "../api/video:encoded_frame",
+ "../api/video:encoded_image",
+ "../api/video:video_frame_type",
+ "../api/video:video_rtp_headers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
diff --git a/third_party/libwebrtc/test/DEPS b/third_party/libwebrtc/test/DEPS
new file mode 100644
index 0000000000..a9e9a7b5f1
--- /dev/null
+++ b/third_party/libwebrtc/test/DEPS
@@ -0,0 +1,88 @@
+include_rules = [
+ "+third_party/libjpeg",
+ "+third_party/libjpeg_turbo",
+ "+call",
+ "+common_audio",
+ "+common_video",
+ "+logging/rtc_event_log",
+ "+media/base",
+ "+media/sctp",
+ "+media/engine",
+ "+modules/audio_coding",
+ "+modules/congestion_controller",
+ "+modules/audio_device",
+ "+modules/audio_mixer",
+ "+modules/audio_processing",
+ "+modules/congestion_controller/bbr",
+ "+modules/rtp_rtcp",
+ "+modules/utility",
+ "+modules/video_capture",
+ "+modules/video_coding",
+ "+p2p/base/basic_packet_socket_factory.h",
+ "+sdk",
+ "+system_wrappers",
+ "+third_party/libyuv",
+ "+video/config",
+]
+
+specific_include_rules = {
+ "gmock\.h": [
+ "+testing/gmock/include/gmock",
+ ],
+ "gtest\.h": [
+ "+testing/gtest/include/gtest",
+ ],
+ ".*congestion_controller_feedback_fuzzer\.cc": [
+ "+modules/congestion_controller/include/receive_side_congestion_controller.h",
+ "+modules/pacing/packet_router.h",
+ "+modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h",
+ ],
+ ".*mdns_parser_fuzzer\.cc": [
+ "+p2p/base/mdns_message.h",
+ ],
+ ".*pseudotcp_parser_fuzzer\.cc": [
+ "+p2p/base/pseudo_tcp.h",
+ ],
+ ".*stun_parser_fuzzer\.cc": [
+ "+p2p/base/stun.h",
+ ],
+ ".*stun_validator_fuzzer\.cc": [
+ "+p2p/base/stun.h",
+ ],
+ ".*test_main\.cc": [
+ "+absl/debugging/failure_signal_handler.h",
+ "+absl/debugging/symbolize.h",
+ ],
+ ".*test_peer\.(h|cc)": [
+ "+pc",
+ "+p2p",
+ ],
+ ".*test_peer_factory\.(h|cc)": [
+ "+pc",
+ "+p2p",
+ ],
+ ".*peer_connection_quality_test_params\.h": [
+ "+p2p/base/port_allocator.h",
+ ],
+ ".*network_emulation_pc_unittest\.cc": [
+ "+pc/peer_connection_wrapper.h",
+ "+pc/test/mock_peer_connection_observers.h",
+ "+p2p/client/basic_port_allocator.h",
+ ],
+ ".*peer_connection_quality_test\.(h|cc)": [
+ "+pc",
+ ],
+ ".*sdp_changer\.(h|cc)": [
+ "+pc",
+ "+p2p",
+ ],
+ ".*test_video_capturer_video_track_source.h": [
+ "+pc",
+ ],
+ "benchmark_main\.cc": [
+ "+benchmark",
+ ],
+ "emulated_turn_server\.h": [
+ "+p2p/base/turn_server.h",
+ ]
+}
diff --git a/third_party/libwebrtc/test/OWNERS b/third_party/libwebrtc/test/OWNERS
new file mode 100644
index 0000000000..a1bd812244
--- /dev/null
+++ b/third_party/libwebrtc/test/OWNERS
@@ -0,0 +1,7 @@
+sprang@webrtc.org
+srte@webrtc.org
+stefan@webrtc.org
+titovartem@webrtc.org
+landrey@webrtc.org
+mbonadei@webrtc.org
+jleconte@webrtc.org
diff --git a/third_party/libwebrtc/test/android/AndroidManifest.xml b/third_party/libwebrtc/test/android/AndroidManifest.xml
new file mode 100644
index 0000000000..ad3f434b4f
--- /dev/null
+++ b/third_party/libwebrtc/test/android/AndroidManifest.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+Copyright 2017 The WebRTC project authors. All Rights Reserved.
+
+Use of this source code is governed by a BSD-style license
+that can be found in the LICENSE file in the root of the source
+tree. An additional intellectual property rights grant can be found
+in the file PATENTS. All contributing project authors may
+be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.webrtc.native_test"
+ android:versionCode="1"
+ android:versionName="1.0">
+
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="23" />
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
+ <uses-permission android:name="android.permission.BLUETOOTH"/>
+ <uses-permission android:name="android.permission.BLUETOOTH_ADMIN"/>
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.INTERNET"/>
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
+ <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+ <uses-permission android:name="android.permission.WAKE_LOCK"/>
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+
+ <application android:label="NativeTests"
+ android:name="org.chromium.native_test.NativeTestApplication">
+ <uses-library android:name="android.test.runner"/>
+ <activity android:name=".RTCNativeUnitTestActivity"
+ android:label="NativeTest"
+ android:configChanges="orientation|keyboardHidden"
+ android:process=":test_process">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+ <instrumentation android:name="org.chromium.build.gtest_apk.NativeTestInstrumentationTestRunner"
+ android:targetPackage="org.webrtc.native_test"
+ android:label="Instrumentation entry point for org.webrtc.native_test"
+ chromium-junit3="true"/>
+
+</manifest>
diff --git a/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTest.java b/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTest.java
new file mode 100644
index 0000000000..dede7edd1f
--- /dev/null
+++ b/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTest.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.native_test;
+
+import android.app.Activity;
+import org.chromium.native_test.NativeUnitTest;
+import org.webrtc.ContextUtils;
+
+/**
+ * Native unit test that calls ContextUtils.initialize for WebRTC.
+ */
+public class RTCNativeUnitTest extends NativeUnitTest {
+ @Override
+ public void preCreate(Activity activity) {
+ super.preCreate(activity);
+ ContextUtils.initialize(activity.getApplicationContext());
+ }
+}
diff --git a/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTestActivity.java b/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTestActivity.java
new file mode 100644
index 0000000000..2a413682fe
--- /dev/null
+++ b/third_party/libwebrtc/test/android/org/webrtc/native_test/RTCNativeUnitTestActivity.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.native_test;
+
+import android.app.Activity;
+import android.os.Bundle;
+
+/**
+ * Activity that uses RTCNativeUnitTest to run the tests.
+ */
+public class RTCNativeUnitTestActivity extends Activity {
+ private RTCNativeUnitTest mTest = new RTCNativeUnitTest();
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ mTest.preCreate(this);
+ super.onCreate(savedInstanceState);
+ mTest.postCreate(this);
+ }
+
+ @Override
+ public void onStart() {
+ super.onStart();
+ mTest.postStart(this, false);
+ }
+}
diff --git a/third_party/libwebrtc/test/audio_decoder_proxy_factory.h b/third_party/libwebrtc/test/audio_decoder_proxy_factory.h
new file mode 100644
index 0000000000..95606d6ff7
--- /dev/null
+++ b/third_party/libwebrtc/test/audio_decoder_proxy_factory.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_AUDIO_DECODER_PROXY_FACTORY_H_
+#define TEST_AUDIO_DECODER_PROXY_FACTORY_H_
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder.h"
+#include "api/audio_codecs/audio_decoder_factory.h"
+
+namespace webrtc {
+namespace test {
+
+// A decoder factory with a single underlying AudioDecoder object, intended for
+// test purposes. Each call to MakeAudioDecoder returns a proxy for the same
+// decoder, typically a mock or fake decoder.
+class AudioDecoderProxyFactory : public AudioDecoderFactory {
+ public:
+ explicit AudioDecoderProxyFactory(AudioDecoder* decoder)
+ : decoder_(decoder) {}
+
+ // Unused by tests.
+ std::vector<AudioCodecSpec> GetSupportedDecoders() override {
+ RTC_DCHECK_NOTREACHED();
+ return {};
+ }
+
+ bool IsSupportedDecoder(const SdpAudioFormat& format) override {
+ return true;
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& /* format */,
+ absl::optional<AudioCodecPairId> /* codec_pair_id */) override {
+ return std::make_unique<DecoderProxy>(decoder_);
+ }
+
+ private:
+  // Wrapper class, since MakeAudioDecoder needs to surrender
+ // ownership to the object it returns.
+ class DecoderProxy final : public AudioDecoder {
+ public:
+ explicit DecoderProxy(AudioDecoder* decoder) : decoder_(decoder) {}
+
+ private:
+ std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload,
+ uint32_t timestamp) override {
+ return decoder_->ParsePayload(std::move(payload), timestamp);
+ }
+
+ bool HasDecodePlc() const override { return decoder_->HasDecodePlc(); }
+
+ int ErrorCode() override { return decoder_->ErrorCode(); }
+
+ void Reset() override { decoder_->Reset(); }
+
+ int SampleRateHz() const override { return decoder_->SampleRateHz(); }
+
+ size_t Channels() const override { return decoder_->Channels(); }
+
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override {
+ // Needed for tests of NetEqImpl::DecodeCng, which calls the deprecated
+ // Decode method.
+ size_t max_decoded_bytes =
+ decoder_->PacketDuration(encoded, encoded_len) *
+ decoder_->Channels() * sizeof(int16_t);
+ return decoder_->Decode(encoded, encoded_len, sample_rate_hz,
+ max_decoded_bytes, decoded, speech_type);
+ }
+
+ void GeneratePlc(size_t requested_samples_per_channel,
+ rtc::BufferT<int16_t>* concealment_audio) override {
+ decoder_->GeneratePlc(requested_samples_per_channel, concealment_audio);
+ }
+
+ AudioDecoder* const decoder_;
+ };
+
+ AudioDecoder* const decoder_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_AUDIO_DECODER_PROXY_FACTORY_H_
diff --git a/third_party/libwebrtc/test/benchmark_main.cc b/third_party/libwebrtc/test/benchmark_main.cc
new file mode 100644
index 0000000000..1a79c24913
--- /dev/null
+++ b/third_party/libwebrtc/test/benchmark_main.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark/benchmark.h"
+
+int main(int argc, char* argv[]) {
+ benchmark::Initialize(&argc, argv);
+ benchmark::RunSpecifiedBenchmarks();
+ return 0;
+}
diff --git a/third_party/libwebrtc/test/call_config_utils.cc b/third_party/libwebrtc/test/call_config_utils.cc
new file mode 100644
index 0000000000..da3d76c689
--- /dev/null
+++ b/third_party/libwebrtc/test/call_config_utils.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/call_config_utils.h"
+
+#include <string>
+#include <vector>
+
+namespace webrtc {
+namespace test {
+
+// Deserializes a JSON representation of the VideoReceiveStreamInterface::Config
+// back into a valid object. This will not initialize the decoders or the
+// renderer.
+VideoReceiveStreamInterface::Config ParseVideoReceiveStreamJsonConfig(
+ webrtc::Transport* transport,
+ const Json::Value& json) {
+ auto receive_config = VideoReceiveStreamInterface::Config(transport);
+ for (const auto& decoder_json : json["decoders"]) {
+ VideoReceiveStreamInterface::Decoder decoder;
+ decoder.video_format =
+ SdpVideoFormat(decoder_json["payload_name"].asString());
+ decoder.payload_type = decoder_json["payload_type"].asInt64();
+ for (const auto& params_json : decoder_json["codec_params"]) {
+ std::vector<std::string> members = params_json.getMemberNames();
+ RTC_CHECK_EQ(members.size(), 1);
+ decoder.video_format.parameters[members[0]] =
+ params_json[members[0]].asString();
+ }
+ receive_config.decoders.push_back(decoder);
+ }
+ receive_config.render_delay_ms = json["render_delay_ms"].asInt64();
+ receive_config.rtp.remote_ssrc = json["rtp"]["remote_ssrc"].asInt64();
+ receive_config.rtp.local_ssrc = json["rtp"]["local_ssrc"].asInt64();
+ receive_config.rtp.rtcp_mode =
+ json["rtp"]["rtcp_mode"].asString() == "RtcpMode::kCompound"
+ ? RtcpMode::kCompound
+ : RtcpMode::kReducedSize;
+ receive_config.rtp.lntf.enabled = json["rtp"]["lntf"]["enabled"].asInt64();
+ receive_config.rtp.nack.rtp_history_ms =
+ json["rtp"]["nack"]["rtp_history_ms"].asInt64();
+ receive_config.rtp.ulpfec_payload_type =
+ json["rtp"]["ulpfec_payload_type"].asInt64();
+ receive_config.rtp.red_payload_type =
+ json["rtp"]["red_payload_type"].asInt64();
+ receive_config.rtp.rtx_ssrc = json["rtp"]["rtx_ssrc"].asInt64();
+
+ for (const auto& pl_json : json["rtp"]["rtx_payload_types"]) {
+ std::vector<std::string> members = pl_json.getMemberNames();
+ RTC_CHECK_EQ(members.size(), 1);
+ Json::Value rtx_payload_type = pl_json[members[0]];
+ receive_config.rtp.rtx_associated_payload_types[std::stoi(members[0])] =
+ rtx_payload_type.asInt64();
+ }
+ for (const auto& ext_json : json["rtp"]["extensions"]) {
+ receive_config.rtp.extensions.emplace_back(ext_json["uri"].asString(),
+ ext_json["id"].asInt64(),
+ ext_json["encrypt"].asBool());
+ }
+ return receive_config;
+}
+
+Json::Value GenerateVideoReceiveStreamJsonConfig(
+ const VideoReceiveStreamInterface::Config& config) {
+ Json::Value root_json;
+
+ root_json["decoders"] = Json::Value(Json::arrayValue);
+ for (const auto& decoder : config.decoders) {
+ Json::Value decoder_json;
+ decoder_json["payload_type"] = decoder.payload_type;
+ decoder_json["payload_name"] = decoder.video_format.name;
+ decoder_json["codec_params"] = Json::Value(Json::arrayValue);
+ for (const auto& codec_param_entry : decoder.video_format.parameters) {
+ Json::Value codec_param_json;
+ codec_param_json[codec_param_entry.first] = codec_param_entry.second;
+ decoder_json["codec_params"].append(codec_param_json);
+ }
+ root_json["decoders"].append(decoder_json);
+ }
+
+ Json::Value rtp_json;
+ rtp_json["remote_ssrc"] = config.rtp.remote_ssrc;
+ rtp_json["local_ssrc"] = config.rtp.local_ssrc;
+ rtp_json["rtcp_mode"] = config.rtp.rtcp_mode == RtcpMode::kCompound
+ ? "RtcpMode::kCompound"
+ : "RtcpMode::kReducedSize";
+ rtp_json["lntf"]["enabled"] = config.rtp.lntf.enabled;
+ rtp_json["nack"]["rtp_history_ms"] = config.rtp.nack.rtp_history_ms;
+ rtp_json["ulpfec_payload_type"] = config.rtp.ulpfec_payload_type;
+ rtp_json["red_payload_type"] = config.rtp.red_payload_type;
+ rtp_json["rtx_ssrc"] = config.rtp.rtx_ssrc;
+ rtp_json["rtx_payload_types"] = Json::Value(Json::arrayValue);
+
+ for (auto& kv : config.rtp.rtx_associated_payload_types) {
+ Json::Value val;
+ val[std::to_string(kv.first)] = kv.second;
+ rtp_json["rtx_payload_types"].append(val);
+ }
+
+ rtp_json["extensions"] = Json::Value(Json::arrayValue);
+ for (auto& ext : config.rtp.extensions) {
+ Json::Value ext_json;
+ ext_json["uri"] = ext.uri;
+ ext_json["id"] = ext.id;
+ ext_json["encrypt"] = ext.encrypt;
+ rtp_json["extensions"].append(ext_json);
+ }
+ root_json["rtp"] = rtp_json;
+
+ root_json["render_delay_ms"] = config.render_delay_ms;
+
+ return root_json;
+}
+
+} // namespace test.
+} // namespace webrtc.
diff --git a/third_party/libwebrtc/test/call_config_utils.h b/third_party/libwebrtc/test/call_config_utils.h
new file mode 100644
index 0000000000..97cfdc3396
--- /dev/null
+++ b/third_party/libwebrtc/test/call_config_utils.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_CALL_CONFIG_UTILS_H_
+#define TEST_CALL_CONFIG_UTILS_H_
+
+#include "call/video_receive_stream.h"
+#include "rtc_base/strings/json.h"
+
+namespace webrtc {
+namespace test {
+
+// Deserializes a JSON representation of the VideoReceiveStreamInterface::Config
+// back into a valid object. This will not initialize the decoders or the
+// renderer.
+VideoReceiveStreamInterface::Config ParseVideoReceiveStreamJsonConfig(
+ webrtc::Transport* transport,
+ const Json::Value& json);
+
+// Serialize a VideoReceiveStreamInterface::Config into a Json object.
+Json::Value GenerateVideoReceiveStreamJsonConfig(
+ const VideoReceiveStreamInterface::Config& config);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_CALL_CONFIG_UTILS_H_
diff --git a/third_party/libwebrtc/test/call_config_utils_unittest.cc b/third_party/libwebrtc/test/call_config_utils_unittest.cc
new file mode 100644
index 0000000000..e010ab6707
--- /dev/null
+++ b/third_party/libwebrtc/test/call_config_utils_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/call_config_utils.h"
+
+#include "call/video_receive_stream.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+TEST(CallConfigUtils, MarshalUnmarshalProcessSameObject) {
+ VideoReceiveStreamInterface::Config recv_config(nullptr);
+
+ VideoReceiveStreamInterface::Decoder decoder;
+ decoder.payload_type = 10;
+ decoder.video_format.name = "test";
+ decoder.video_format.parameters["99"] = "b";
+ recv_config.decoders.push_back(decoder);
+ recv_config.render_delay_ms = 10;
+ recv_config.rtp.remote_ssrc = 100;
+ recv_config.rtp.local_ssrc = 101;
+ recv_config.rtp.rtcp_mode = RtcpMode::kCompound;
+ recv_config.rtp.lntf.enabled = false;
+ recv_config.rtp.nack.rtp_history_ms = 150;
+ recv_config.rtp.red_payload_type = 50;
+ recv_config.rtp.rtx_ssrc = 1000;
+ recv_config.rtp.rtx_associated_payload_types[10] = 10;
+ recv_config.rtp.extensions.emplace_back("uri", 128, true);
+
+ VideoReceiveStreamInterface::Config unmarshaled_config =
+ ParseVideoReceiveStreamJsonConfig(
+ nullptr, GenerateVideoReceiveStreamJsonConfig(recv_config));
+
+ EXPECT_EQ(recv_config.decoders[0].payload_type,
+ unmarshaled_config.decoders[0].payload_type);
+ EXPECT_EQ(recv_config.decoders[0].video_format.name,
+ unmarshaled_config.decoders[0].video_format.name);
+ EXPECT_EQ(recv_config.decoders[0].video_format.parameters,
+ unmarshaled_config.decoders[0].video_format.parameters);
+ EXPECT_EQ(recv_config.render_delay_ms, unmarshaled_config.render_delay_ms);
+ EXPECT_EQ(recv_config.rtp.remote_ssrc, unmarshaled_config.rtp.remote_ssrc);
+ EXPECT_EQ(recv_config.rtp.local_ssrc, unmarshaled_config.rtp.local_ssrc);
+ EXPECT_EQ(recv_config.rtp.rtcp_mode, unmarshaled_config.rtp.rtcp_mode);
+ EXPECT_EQ(recv_config.rtp.lntf.enabled, unmarshaled_config.rtp.lntf.enabled);
+ EXPECT_EQ(recv_config.rtp.nack.rtp_history_ms,
+ unmarshaled_config.rtp.nack.rtp_history_ms);
+ EXPECT_EQ(recv_config.rtp.red_payload_type,
+ unmarshaled_config.rtp.red_payload_type);
+ EXPECT_EQ(recv_config.rtp.rtx_ssrc, unmarshaled_config.rtp.rtx_ssrc);
+ EXPECT_EQ(recv_config.rtp.rtx_associated_payload_types,
+ unmarshaled_config.rtp.rtx_associated_payload_types);
+ EXPECT_EQ(recv_config.rtp.extensions, recv_config.rtp.extensions);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/call_test.cc b/third_party/libwebrtc/test/call_test.cc
new file mode 100644
index 0000000000..62d18394f9
--- /dev/null
+++ b/third_party/libwebrtc/test/call_test.cc
@@ -0,0 +1,861 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/call_test.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/create_frame_generator.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "call/fake_network_pipe.h"
+#include "call/packet_receiver.h"
+#include "call/simulated_network.h"
+#include "modules/audio_mixer/audio_mixer_impl.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/fake_encoder.h"
+#include "test/rtp_rtcp_observer.h"
+#include "test/testsupport/file_utils.h"
+#include "video/config/video_encoder_config.h"
+
+namespace webrtc {
+namespace test {
+
+CallTest::CallTest()
+ : clock_(Clock::GetRealTimeClock()),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ send_event_log_(std::make_unique<RtcEventLogNull>()),
+ recv_event_log_(std::make_unique<RtcEventLogNull>()),
+ audio_send_config_(/*send_transport=*/nullptr),
+ audio_send_stream_(nullptr),
+ frame_generator_capturer_(nullptr),
+ fake_encoder_factory_([this]() {
+ std::unique_ptr<FakeEncoder> fake_encoder;
+ if (video_encoder_configs_[0].codec_type == kVideoCodecVP8) {
+ fake_encoder = std::make_unique<FakeVp8Encoder>(clock_);
+ } else {
+ fake_encoder = std::make_unique<FakeEncoder>(clock_);
+ }
+ fake_encoder->SetMaxBitrate(fake_encoder_max_bitrate_);
+ return fake_encoder;
+ }),
+ fake_decoder_factory_([]() { return std::make_unique<FakeDecoder>(); }),
+ bitrate_allocator_factory_(CreateBuiltinVideoBitrateAllocatorFactory()),
+ num_video_streams_(1),
+ num_audio_streams_(0),
+ num_flexfec_streams_(0),
+ audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()),
+ audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()),
+ task_queue_(task_queue_factory_->CreateTaskQueue(
+ "CallTestTaskQueue",
+ TaskQueueFactory::Priority::NORMAL)) {}
+
+CallTest::~CallTest() = default;
+
+void CallTest::RegisterRtpExtension(const RtpExtension& extension) {
+ for (const RtpExtension& registered_extension : rtp_extensions_) {
+ if (registered_extension.id == extension.id) {
+ ASSERT_EQ(registered_extension.uri, extension.uri)
+ << "Different URIs associated with ID " << extension.id << ".";
+ ASSERT_EQ(registered_extension.encrypt, extension.encrypt)
+ << "Encryption mismatch associated with ID " << extension.id << ".";
+ return;
+ } else { // Different IDs.
+ // Different IDs referring to the same extension probably indicate
+ // a mistake in the test.
+ ASSERT_FALSE(registered_extension.uri == extension.uri &&
+ registered_extension.encrypt == extension.encrypt)
+ << "URI " << extension.uri
+ << (extension.encrypt ? " with " : " without ")
+ << "encryption already registered with a different "
+ "ID ("
+ << extension.id << " vs. " << registered_extension.id << ").";
+ }
+ }
+ rtp_extensions_.push_back(extension);
+}
+
+void CallTest::RunBaseTest(BaseTest* test) {
+ SendTask(task_queue(), [this, test]() {
+ num_video_streams_ = test->GetNumVideoStreams();
+ num_audio_streams_ = test->GetNumAudioStreams();
+ num_flexfec_streams_ = test->GetNumFlexfecStreams();
+ RTC_DCHECK(num_video_streams_ > 0 || num_audio_streams_ > 0);
+ Call::Config send_config(send_event_log_.get());
+ test->ModifySenderBitrateConfig(&send_config.bitrate_config);
+ if (num_audio_streams_ > 0) {
+ CreateFakeAudioDevices(test->CreateCapturer(), test->CreateRenderer());
+ test->OnFakeAudioDevicesCreated(fake_send_audio_device_.get(),
+ fake_recv_audio_device_.get());
+ apm_send_ = AudioProcessingBuilder().Create();
+ apm_recv_ = AudioProcessingBuilder().Create();
+ EXPECT_EQ(0, fake_send_audio_device_->Init());
+ EXPECT_EQ(0, fake_recv_audio_device_->Init());
+ AudioState::Config audio_state_config;
+ audio_state_config.audio_mixer = AudioMixerImpl::Create();
+ audio_state_config.audio_processing = apm_send_;
+ audio_state_config.audio_device_module = fake_send_audio_device_;
+ send_config.audio_state = AudioState::Create(audio_state_config);
+ fake_send_audio_device_->RegisterAudioCallback(
+ send_config.audio_state->audio_transport());
+ }
+ CreateSenderCall(send_config);
+ if (test->ShouldCreateReceivers()) {
+ Call::Config recv_config(recv_event_log_.get());
+ test->ModifyReceiverBitrateConfig(&recv_config.bitrate_config);
+ if (num_audio_streams_ > 0) {
+ AudioState::Config audio_state_config;
+ audio_state_config.audio_mixer = AudioMixerImpl::Create();
+ audio_state_config.audio_processing = apm_recv_;
+ audio_state_config.audio_device_module = fake_recv_audio_device_;
+ recv_config.audio_state = AudioState::Create(audio_state_config);
+ fake_recv_audio_device_->RegisterAudioCallback(
+ recv_config.audio_state->audio_transport());
+ }
+ CreateReceiverCall(recv_config);
+ }
+ test->OnCallsCreated(sender_call_.get(), receiver_call_.get());
+ CreateReceiveTransport(test->GetReceiveTransportConfig(), test);
+ CreateSendTransport(test->GetSendTransportConfig(), test);
+ test->OnTransportCreated(send_transport_.get(), send_simulated_network_,
+ receive_transport_.get(),
+ receive_simulated_network_);
+ if (test->ShouldCreateReceivers()) {
+ if (num_video_streams_ > 0)
+ receiver_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+ if (num_audio_streams_ > 0)
+ receiver_call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
+ } else {
+ // Sender-only call delivers to itself.
+ send_transport_->SetReceiver(sender_call_->Receiver());
+ receive_transport_->SetReceiver(nullptr);
+ }
+
+ CreateSendConfig(num_video_streams_, num_audio_streams_,
+ num_flexfec_streams_, send_transport_.get());
+ if (test->ShouldCreateReceivers()) {
+ CreateMatchingReceiveConfigs();
+ }
+ if (num_video_streams_ > 0) {
+ test->ModifyVideoConfigs(GetVideoSendConfig(), &video_receive_configs_,
+ GetVideoEncoderConfig());
+ }
+ if (num_audio_streams_ > 0) {
+ test->ModifyAudioConfigs(&audio_send_config_, &audio_receive_configs_);
+ }
+ if (num_flexfec_streams_ > 0) {
+ test->ModifyFlexfecConfigs(&flexfec_receive_configs_);
+ }
+
+ if (num_flexfec_streams_ > 0) {
+ CreateFlexfecStreams();
+ test->OnFlexfecStreamsCreated(flexfec_receive_streams_);
+ }
+ if (num_video_streams_ > 0) {
+ CreateVideoStreams();
+ test->OnVideoStreamsCreated(GetVideoSendStream(), video_receive_streams_);
+ }
+ if (num_audio_streams_ > 0) {
+ CreateAudioStreams();
+ test->OnAudioStreamsCreated(audio_send_stream_, audio_receive_streams_);
+ }
+
+ if (num_video_streams_ > 0) {
+ int width = kDefaultWidth;
+ int height = kDefaultHeight;
+ int frame_rate = kDefaultFramerate;
+ test->ModifyVideoCaptureStartResolution(&width, &height, &frame_rate);
+ test->ModifyVideoDegradationPreference(&degradation_preference_);
+ CreateFrameGeneratorCapturer(frame_rate, width, height);
+ test->OnFrameGeneratorCapturerCreated(frame_generator_capturer_);
+ }
+
+ Start();
+ });
+
+ test->PerformTest();
+
+ SendTask(task_queue(), [this, test]() {
+ Stop();
+ test->OnStreamsStopped();
+ DestroyStreams();
+ send_transport_.reset();
+ receive_transport_.reset();
+
+ frame_generator_capturer_ = nullptr;
+ DestroyCalls();
+
+ fake_send_audio_device_ = nullptr;
+ fake_recv_audio_device_ = nullptr;
+ });
+}
+
+void CallTest::CreateCalls() {
+ CreateCalls(Call::Config(send_event_log_.get()),
+ Call::Config(recv_event_log_.get()));
+}
+
+void CallTest::CreateCalls(const Call::Config& sender_config,
+ const Call::Config& receiver_config) {
+ CreateSenderCall(sender_config);
+ CreateReceiverCall(receiver_config);
+}
+
+void CallTest::CreateSenderCall() {
+ CreateSenderCall(Call::Config(send_event_log_.get()));
+}
+
+void CallTest::CreateSenderCall(const Call::Config& config) {
+ auto sender_config = config;
+ sender_config.task_queue_factory = task_queue_factory_.get();
+ sender_config.network_state_predictor_factory =
+ network_state_predictor_factory_.get();
+ sender_config.network_controller_factory = network_controller_factory_.get();
+ sender_config.trials = &field_trials_;
+ sender_call_.reset(Call::Create(sender_config));
+}
+
+void CallTest::CreateReceiverCall(const Call::Config& config) {
+ auto receiver_config = config;
+ receiver_config.task_queue_factory = task_queue_factory_.get();
+ receiver_config.trials = &field_trials_;
+ receiver_call_.reset(Call::Create(receiver_config));
+}
+
+void CallTest::DestroyCalls() {
+ send_transport_.reset();
+ receive_transport_.reset();
+ sender_call_.reset();
+ receiver_call_.reset();
+}
+
+void CallTest::CreateVideoSendConfig(VideoSendStream::Config* video_config,
+ size_t num_video_streams,
+ size_t num_used_ssrcs,
+ Transport* send_transport) {
+ RTC_DCHECK_LE(num_video_streams + num_used_ssrcs, kNumSsrcs);
+ *video_config = VideoSendStream::Config(send_transport);
+ video_config->encoder_settings.encoder_factory = &fake_encoder_factory_;
+ video_config->encoder_settings.bitrate_allocator_factory =
+ bitrate_allocator_factory_.get();
+ video_config->rtp.payload_name = "FAKE";
+ video_config->rtp.payload_type = kFakeVideoSendPayloadType;
+ video_config->rtp.extmap_allow_mixed = true;
+ AddRtpExtensionByUri(RtpExtension::kTransportSequenceNumberUri,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kAbsSendTimeUri,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kTimestampOffsetUri,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kVideoContentTypeUri,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kGenericFrameDescriptorUri00,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kDependencyDescriptorUri,
+ &video_config->rtp.extensions);
+ if (video_encoder_configs_.empty()) {
+ video_encoder_configs_.emplace_back();
+ FillEncoderConfiguration(kVideoCodecGeneric, num_video_streams,
+ &video_encoder_configs_.back());
+ }
+ for (size_t i = 0; i < num_video_streams; ++i)
+ video_config->rtp.ssrcs.push_back(kVideoSendSsrcs[num_used_ssrcs + i]);
+ AddRtpExtensionByUri(RtpExtension::kVideoRotationUri,
+ &video_config->rtp.extensions);
+ AddRtpExtensionByUri(RtpExtension::kColorSpaceUri,
+ &video_config->rtp.extensions);
+}
+
+void CallTest::CreateAudioAndFecSendConfigs(size_t num_audio_streams,
+ size_t num_flexfec_streams,
+ Transport* send_transport) {
+ RTC_DCHECK_LE(num_audio_streams, 1);
+ RTC_DCHECK_LE(num_flexfec_streams, 1);
+ if (num_audio_streams > 0) {
+ AudioSendStream::Config audio_send_config(send_transport);
+ audio_send_config.rtp.ssrc = kAudioSendSsrc;
+ AddRtpExtensionByUri(RtpExtension::kTransportSequenceNumberUri,
+ &audio_send_config.rtp.extensions);
+
+ audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
+ kAudioSendPayloadType, {"opus", 48000, 2, {{"stereo", "1"}}});
+ audio_send_config.encoder_factory = audio_encoder_factory_;
+ SetAudioConfig(audio_send_config);
+ }
+
+ // TODO(brandtr): Update this when we support multistream protection.
+ if (num_flexfec_streams > 0) {
+ SetSendFecConfig({kVideoSendSsrcs[0]});
+ }
+}
+
+void CallTest::SetAudioConfig(const AudioSendStream::Config& config) {
+ audio_send_config_ = config;
+}
+
+void CallTest::SetSendFecConfig(std::vector<uint32_t> video_send_ssrcs) {
+ GetVideoSendConfig()->rtp.flexfec.payload_type = kFlexfecPayloadType;
+ GetVideoSendConfig()->rtp.flexfec.ssrc = kFlexfecSendSsrc;
+ GetVideoSendConfig()->rtp.flexfec.protected_media_ssrcs = video_send_ssrcs;
+}
+
+void CallTest::SetSendUlpFecConfig(VideoSendStream::Config* send_config) {
+ send_config->rtp.ulpfec.red_payload_type = kRedPayloadType;
+ send_config->rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
+ send_config->rtp.ulpfec.red_rtx_payload_type = kRtxRedPayloadType;
+}
+
+void CallTest::SetReceiveUlpFecConfig(
+ VideoReceiveStreamInterface::Config* receive_config) {
+ receive_config->rtp.red_payload_type = kRedPayloadType;
+ receive_config->rtp.ulpfec_payload_type = kUlpfecPayloadType;
+ receive_config->rtp.rtx_associated_payload_types[kRtxRedPayloadType] =
+ kRedPayloadType;
+}
+
+void CallTest::CreateSendConfig(size_t num_video_streams,
+ size_t num_audio_streams,
+ size_t num_flexfec_streams,
+ Transport* send_transport) {
+ if (num_video_streams > 0) {
+ video_send_configs_.clear();
+ video_send_configs_.emplace_back(nullptr);
+ CreateVideoSendConfig(&video_send_configs_.back(), num_video_streams, 0,
+ send_transport);
+ }
+ CreateAudioAndFecSendConfigs(num_audio_streams, num_flexfec_streams,
+ send_transport);
+}
+
+void CallTest::CreateMatchingVideoReceiveConfigs(
+ const VideoSendStream::Config& video_send_config,
+ Transport* rtcp_send_transport) {
+ CreateMatchingVideoReceiveConfigs(video_send_config, rtcp_send_transport,
+ &fake_decoder_factory_, absl::nullopt,
+ false, 0);
+}
+
+void CallTest::CreateMatchingVideoReceiveConfigs(
+ const VideoSendStream::Config& video_send_config,
+ Transport* rtcp_send_transport,
+ VideoDecoderFactory* decoder_factory,
+ absl::optional<size_t> decode_sub_stream,
+ bool receiver_reference_time_report,
+ int rtp_history_ms) {
+ AddMatchingVideoReceiveConfigs(
+ &video_receive_configs_, video_send_config, rtcp_send_transport,
+ decoder_factory, decode_sub_stream, receiver_reference_time_report,
+ rtp_history_ms);
+}
+
+void CallTest::AddMatchingVideoReceiveConfigs(
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ const VideoSendStream::Config& video_send_config,
+ Transport* rtcp_send_transport,
+ VideoDecoderFactory* decoder_factory,
+ absl::optional<size_t> decode_sub_stream,
+ bool receiver_reference_time_report,
+ int rtp_history_ms) {
+ RTC_DCHECK(!video_send_config.rtp.ssrcs.empty());
+ VideoReceiveStreamInterface::Config default_config(rtcp_send_transport);
+ default_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
+ for (const RtpExtension& extension : video_send_config.rtp.extensions)
+ default_config.rtp.extensions.push_back(extension);
+ default_config.rtp.nack.rtp_history_ms = rtp_history_ms;
+ // Enable RTT calculation so NTP time estimator will work.
+ default_config.rtp.rtcp_xr.receiver_reference_time_report =
+ receiver_reference_time_report;
+ default_config.renderer = &fake_renderer_;
+
+ for (size_t i = 0; i < video_send_config.rtp.ssrcs.size(); ++i) {
+ VideoReceiveStreamInterface::Config video_recv_config(
+ default_config.Copy());
+ video_recv_config.decoders.clear();
+ if (!video_send_config.rtp.rtx.ssrcs.empty()) {
+ video_recv_config.rtp.rtx_ssrc = video_send_config.rtp.rtx.ssrcs[i];
+ video_recv_config.rtp.rtx_associated_payload_types[kSendRtxPayloadType] =
+ video_send_config.rtp.payload_type;
+ }
+ video_recv_config.rtp.remote_ssrc = video_send_config.rtp.ssrcs[i];
+ VideoReceiveStreamInterface::Decoder decoder;
+
+ decoder.payload_type = video_send_config.rtp.payload_type;
+ decoder.video_format = SdpVideoFormat(video_send_config.rtp.payload_name);
+ // Force fake decoders on non-selected simulcast streams.
+ if (!decode_sub_stream || i == *decode_sub_stream) {
+ video_recv_config.decoder_factory = decoder_factory;
+ } else {
+ video_recv_config.decoder_factory = &fake_decoder_factory_;
+ }
+ video_recv_config.decoders.push_back(decoder);
+ receive_configs->emplace_back(std::move(video_recv_config));
+ }
+}
+
+void CallTest::CreateMatchingAudioAndFecConfigs(
+ Transport* rtcp_send_transport) {
+ RTC_DCHECK_GE(1, num_audio_streams_);
+ if (num_audio_streams_ == 1) {
+ CreateMatchingAudioConfigs(rtcp_send_transport, "");
+ }
+
+ // TODO(brandtr): Update this when we support multistream protection.
+ RTC_DCHECK(num_flexfec_streams_ <= 1);
+ if (num_flexfec_streams_ == 1) {
+ CreateMatchingFecConfig(rtcp_send_transport, *GetVideoSendConfig());
+ for (const RtpExtension& extension : GetVideoSendConfig()->rtp.extensions)
+ GetFlexFecConfig()->rtp.extensions.push_back(extension);
+ }
+}
+
+void CallTest::CreateMatchingAudioConfigs(Transport* transport,
+ std::string sync_group) {
+ audio_receive_configs_.push_back(CreateMatchingAudioConfig(
+ audio_send_config_, audio_decoder_factory_, transport, sync_group));
+}
+
+AudioReceiveStreamInterface::Config CallTest::CreateMatchingAudioConfig(
+ const AudioSendStream::Config& send_config,
+ rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
+ Transport* transport,
+ std::string sync_group) {
+ AudioReceiveStreamInterface::Config audio_config;
+ audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc;
+ audio_config.rtcp_send_transport = transport;
+ audio_config.rtp.remote_ssrc = send_config.rtp.ssrc;
+ audio_config.rtp.extensions = send_config.rtp.extensions;
+ audio_config.decoder_factory = audio_decoder_factory;
+ audio_config.decoder_map = {{kAudioSendPayloadType, {"opus", 48000, 2}}};
+ audio_config.sync_group = sync_group;
+ return audio_config;
+}
+
+void CallTest::CreateMatchingFecConfig(
+ Transport* transport,
+ const VideoSendStream::Config& send_config) {
+ FlexfecReceiveStream::Config config(transport);
+ config.payload_type = send_config.rtp.flexfec.payload_type;
+ config.rtp.remote_ssrc = send_config.rtp.flexfec.ssrc;
+ config.protected_media_ssrcs = send_config.rtp.flexfec.protected_media_ssrcs;
+ config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
+ if (!video_receive_configs_.empty()) {
+ video_receive_configs_[0].rtp.protected_by_flexfec = true;
+ video_receive_configs_[0].rtp.packet_sink_ = this;
+ }
+ flexfec_receive_configs_.push_back(config);
+}
+
+void CallTest::CreateMatchingReceiveConfigs(Transport* rtcp_send_transport) {
+ video_receive_configs_.clear();
+ for (VideoSendStream::Config& video_send_config : video_send_configs_) {
+ CreateMatchingVideoReceiveConfigs(video_send_config, rtcp_send_transport);
+ }
+ CreateMatchingAudioAndFecConfigs(rtcp_send_transport);
+}
+
+void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock,
+ float speed,
+ int framerate,
+ int width,
+ int height) {
+ video_sources_.clear();
+ auto frame_generator_capturer =
+ std::make_unique<test::FrameGeneratorCapturer>(
+ clock,
+ test::CreateSquareFrameGenerator(width, height, absl::nullopt,
+ absl::nullopt),
+ framerate * speed, *task_queue_factory_);
+ frame_generator_capturer_ = frame_generator_capturer.get();
+ frame_generator_capturer->Init();
+ video_sources_.push_back(std::move(frame_generator_capturer));
+ ConnectVideoSourcesToStreams();
+}
+
+void CallTest::CreateFrameGeneratorCapturer(int framerate,
+ int width,
+ int height) {
+ video_sources_.clear();
+ auto frame_generator_capturer =
+ std::make_unique<test::FrameGeneratorCapturer>(
+ clock_,
+ test::CreateSquareFrameGenerator(width, height, absl::nullopt,
+ absl::nullopt),
+ framerate, *task_queue_factory_);
+ frame_generator_capturer_ = frame_generator_capturer.get();
+ frame_generator_capturer->Init();
+ video_sources_.push_back(std::move(frame_generator_capturer));
+ ConnectVideoSourcesToStreams();
+}
+
+void CallTest::CreateFakeAudioDevices(
+ std::unique_ptr<TestAudioDeviceModule::Capturer> capturer,
+ std::unique_ptr<TestAudioDeviceModule::Renderer> renderer) {
+ fake_send_audio_device_ = TestAudioDeviceModule::Create(
+ task_queue_factory_.get(), std::move(capturer), nullptr, 1.f);
+ fake_recv_audio_device_ = TestAudioDeviceModule::Create(
+ task_queue_factory_.get(), nullptr, std::move(renderer), 1.f);
+}
+
+void CallTest::CreateVideoStreams() {
+ RTC_DCHECK(video_receive_streams_.empty());
+ CreateVideoSendStreams();
+ for (size_t i = 0; i < video_receive_configs_.size(); ++i) {
+ video_receive_streams_.push_back(receiver_call_->CreateVideoReceiveStream(
+ video_receive_configs_[i].Copy()));
+ }
+}
+
+void CallTest::CreateVideoSendStreams() {
+ RTC_DCHECK(video_send_streams_.empty());
+
+ // We currently only support testing external fec controllers with a single
+ // VideoSendStream.
+ if (fec_controller_factory_.get()) {
+ RTC_DCHECK_LE(video_send_configs_.size(), 1);
+ }
+
+ // TODO(http://crbug/818127):
+ // Remove this workaround when ALR is not screenshare-specific.
+ std::list<size_t> streams_creation_order;
+ for (size_t i = 0; i < video_send_configs_.size(); ++i) {
+ // If dual streams are created, add the screenshare stream last.
+ if (video_encoder_configs_[i].content_type ==
+ VideoEncoderConfig::ContentType::kScreen) {
+ streams_creation_order.push_back(i);
+ } else {
+ streams_creation_order.push_front(i);
+ }
+ }
+
+ video_send_streams_.resize(video_send_configs_.size(), nullptr);
+
+ for (size_t i : streams_creation_order) {
+ if (fec_controller_factory_.get()) {
+ video_send_streams_[i] = sender_call_->CreateVideoSendStream(
+ video_send_configs_[i].Copy(), video_encoder_configs_[i].Copy(),
+ fec_controller_factory_->CreateFecController());
+ } else {
+ video_send_streams_[i] = sender_call_->CreateVideoSendStream(
+ video_send_configs_[i].Copy(), video_encoder_configs_[i].Copy());
+ }
+ }
+}
+
+void CallTest::CreateVideoSendStream(const VideoEncoderConfig& encoder_config) {
+ RTC_DCHECK(video_send_streams_.empty());
+ video_send_streams_.push_back(sender_call_->CreateVideoSendStream(
+ GetVideoSendConfig()->Copy(), encoder_config.Copy()));
+}
+
+void CallTest::CreateAudioStreams() {
+ RTC_DCHECK(audio_send_stream_ == nullptr);
+ RTC_DCHECK(audio_receive_streams_.empty());
+ audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
+ for (size_t i = 0; i < audio_receive_configs_.size(); ++i) {
+ audio_receive_streams_.push_back(
+ receiver_call_->CreateAudioReceiveStream(audio_receive_configs_[i]));
+ }
+}
+
+void CallTest::CreateFlexfecStreams() {
+ for (size_t i = 0; i < flexfec_receive_configs_.size(); ++i) {
+ flexfec_receive_streams_.push_back(
+ receiver_call_->CreateFlexfecReceiveStream(
+ flexfec_receive_configs_[i]));
+ }
+}
+
+void CallTest::CreateSendTransport(const BuiltInNetworkBehaviorConfig& config,
+ RtpRtcpObserver* observer) {
+ PacketReceiver* receiver =
+ receiver_call_ ? receiver_call_->Receiver() : nullptr;
+
+ auto network = std::make_unique<SimulatedNetwork>(config);
+ send_simulated_network_ = network.get();
+ send_transport_ = std::make_unique<PacketTransport>(
+ task_queue(), sender_call_.get(), observer,
+ test::PacketTransport::kSender, payload_type_map_,
+ std::make_unique<FakeNetworkPipe>(Clock::GetRealTimeClock(),
+ std::move(network), receiver),
+ rtp_extensions_, rtp_extensions_);
+}
+
+void CallTest::CreateReceiveTransport(
+ const BuiltInNetworkBehaviorConfig& config,
+ RtpRtcpObserver* observer) {
+ auto network = std::make_unique<SimulatedNetwork>(config);
+ receive_simulated_network_ = network.get();
+ receive_transport_ = std::make_unique<PacketTransport>(
+ task_queue(), nullptr, observer, test::PacketTransport::kReceiver,
+ payload_type_map_,
+ std::make_unique<FakeNetworkPipe>(Clock::GetRealTimeClock(),
+ std::move(network),
+ sender_call_->Receiver()),
+ rtp_extensions_, rtp_extensions_);
+}
+
+void CallTest::ConnectVideoSourcesToStreams() {
+ for (size_t i = 0; i < video_sources_.size(); ++i)
+ video_send_streams_[i]->SetSource(video_sources_[i].get(),
+ degradation_preference_);
+}
+
+void CallTest::Start() {
+ StartVideoStreams();
+ if (audio_send_stream_) {
+ audio_send_stream_->Start();
+ }
+ for (AudioReceiveStreamInterface* audio_recv_stream : audio_receive_streams_)
+ audio_recv_stream->Start();
+}
+
+void CallTest::StartVideoStreams() {
+ for (size_t i = 0; i < video_send_streams_.size(); ++i) {
+ std::vector<bool> active_rtp_streams(
+ video_send_configs_[i].rtp.ssrcs.size(), true);
+ video_send_streams_[i]->StartPerRtpStream(active_rtp_streams);
+ }
+ for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_)
+ video_recv_stream->Start();
+}
+
+void CallTest::Stop() {
+ for (AudioReceiveStreamInterface* audio_recv_stream : audio_receive_streams_)
+ audio_recv_stream->Stop();
+ if (audio_send_stream_) {
+ audio_send_stream_->Stop();
+ }
+ StopVideoStreams();
+}
+
+void CallTest::StopVideoStreams() {
+ for (VideoSendStream* video_send_stream : video_send_streams_)
+ video_send_stream->Stop();
+ for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_)
+ video_recv_stream->Stop();
+}
+
+void CallTest::DestroyStreams() {
+ if (audio_send_stream_)
+ sender_call_->DestroyAudioSendStream(audio_send_stream_);
+ audio_send_stream_ = nullptr;
+ for (AudioReceiveStreamInterface* audio_recv_stream : audio_receive_streams_)
+ receiver_call_->DestroyAudioReceiveStream(audio_recv_stream);
+
+ DestroyVideoSendStreams();
+
+ for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_)
+ receiver_call_->DestroyVideoReceiveStream(video_recv_stream);
+
+ for (FlexfecReceiveStream* flexfec_recv_stream : flexfec_receive_streams_)
+ receiver_call_->DestroyFlexfecReceiveStream(flexfec_recv_stream);
+
+ video_receive_streams_.clear();
+ video_sources_.clear();
+}
+
+void CallTest::DestroyVideoSendStreams() {
+ for (VideoSendStream* video_send_stream : video_send_streams_)
+ sender_call_->DestroyVideoSendStream(video_send_stream);
+ video_send_streams_.clear();
+}
+
// Forwards a fake capture rotation to the frame generator capturer.
void CallTest::SetFakeVideoCaptureRotation(VideoRotation rotation) {
  frame_generator_capturer_->SetFakeRotation(rotation);
}

// Re-attaches the frame generator to the default send stream with the given
// degradation preference.
void CallTest::SetVideoDegradation(DegradationPreference preference) {
  GetVideoSendStream()->SetSource(frame_generator_capturer_, preference);
}
+
// The accessors below operate on the first (default) config/stream; the
// setters replace the whole list with a single copied entry.
VideoSendStream::Config* CallTest::GetVideoSendConfig() {
  return &video_send_configs_[0];
}

void CallTest::SetVideoSendConfig(const VideoSendStream::Config& config) {
  video_send_configs_.clear();
  video_send_configs_.push_back(config.Copy());
}

VideoEncoderConfig* CallTest::GetVideoEncoderConfig() {
  return &video_encoder_configs_[0];
}

void CallTest::SetVideoEncoderConfig(const VideoEncoderConfig& config) {
  video_encoder_configs_.clear();
  video_encoder_configs_.push_back(config.Copy());
}

VideoSendStream* CallTest::GetVideoSendStream() {
  return video_send_streams_[0];
}
FlexfecReceiveStream::Config* CallTest::GetFlexFecConfig() {
  return &flexfec_receive_configs_[0];
}
+
// RtpPacketSinkInterface implementation: fans each received RTP packet out to
// every FlexFEC receive stream.
void CallTest::OnRtpPacket(const RtpPacketReceived& packet) {
  // All FlexFEC streams protect all of the video streams.
  for (FlexfecReceiveStream* flexfec_recv_stream : flexfec_receive_streams_)
    flexfec_recv_stream->OnRtpPacket(packet);
}
+
+absl::optional<RtpExtension> CallTest::GetRtpExtensionByUri(
+ const std::string& uri) const {
+ for (const auto& extension : rtp_extensions_) {
+ if (extension.uri == uri) {
+ return extension;
+ }
+ }
+ return absl::nullopt;
+}
+
+void CallTest::AddRtpExtensionByUri(
+ const std::string& uri,
+ std::vector<RtpExtension>* extensions) const {
+ const absl::optional<RtpExtension> extension = GetRtpExtensionByUri(uri);
+ if (extension) {
+ extensions->push_back(*extension);
+ }
+}
+
// Out-of-class definitions for the static constants declared in call_test.h.
// Required for ODR-use before C++17, where static constexpr data members were
// not implicitly inline.
constexpr size_t CallTest::kNumSsrcs;
const int CallTest::kDefaultWidth;
const int CallTest::kDefaultHeight;
const int CallTest::kDefaultFramerate;
const uint32_t CallTest::kSendRtxSsrcs[kNumSsrcs] = {
    0xBADCAFD, 0xBADCAFE, 0xBADCAFF, 0xBADCB00, 0xBADCB01, 0xBADCB02};
const uint32_t CallTest::kVideoSendSsrcs[kNumSsrcs] = {
    0xC0FFED, 0xC0FFEE, 0xC0FFEF, 0xC0FFF0, 0xC0FFF1, 0xC0FFF2};
const uint32_t CallTest::kAudioSendSsrc = 0xDEADBEEF;
const uint32_t CallTest::kFlexfecSendSsrc = 0xBADBEEF;
const uint32_t CallTest::kReceiverLocalVideoSsrc = 0x123456;
const uint32_t CallTest::kReceiverLocalAudioSsrc = 0x1234567;
const int CallTest::kNackRtpHistoryMs = 1000;

// Payload-type -> media-type table shared by the test transports to demux
// audio vs. video packets.
const std::map<uint8_t, MediaType> CallTest::payload_type_map_ = {
    {CallTest::kVideoSendPayloadType, MediaType::VIDEO},
    {CallTest::kFakeVideoSendPayloadType, MediaType::VIDEO},
    {CallTest::kSendRtxPayloadType, MediaType::VIDEO},
    {CallTest::kPayloadTypeVP8, MediaType::VIDEO},
    {CallTest::kPayloadTypeVP9, MediaType::VIDEO},
    {CallTest::kPayloadTypeH264, MediaType::VIDEO},
    {CallTest::kPayloadTypeGeneric, MediaType::VIDEO},
    {CallTest::kRedPayloadType, MediaType::VIDEO},
    {CallTest::kRtxRedPayloadType, MediaType::VIDEO},
    {CallTest::kUlpfecPayloadType, MediaType::VIDEO},
    {CallTest::kFlexfecPayloadType, MediaType::VIDEO},
    {CallTest::kAudioSendPayloadType, MediaType::AUDIO}};
+
BaseTest::BaseTest() {}

BaseTest::BaseTest(TimeDelta timeout) : RtpRtcpObserver(timeout) {}

BaseTest::~BaseTest() {}

// Default capturer: pulsed-noise audio at 48 kHz (max amplitude 256).
std::unique_ptr<TestAudioDeviceModule::Capturer> BaseTest::CreateCapturer() {
  return TestAudioDeviceModule::CreatePulsedNoiseCapturer(256, 48000);
}

// Default renderer: discards all rendered audio (48 kHz).
std::unique_ptr<TestAudioDeviceModule::Renderer> BaseTest::CreateRenderer() {
  return TestAudioDeviceModule::CreateDiscardRenderer(48000);
}

// The hooks below are default no-op implementations; concrete tests override
// only the ones they care about.
void BaseTest::OnFakeAudioDevicesCreated(
    TestAudioDeviceModule* send_audio_device,
    TestAudioDeviceModule* recv_audio_device) {}

void BaseTest::ModifySenderBitrateConfig(BitrateConstraints* bitrate_config) {}

void BaseTest::ModifyReceiverBitrateConfig(BitrateConstraints* bitrate_config) {
}

void BaseTest::OnCallsCreated(Call* sender_call, Call* receiver_call) {}

void BaseTest::OnTransportCreated(PacketTransport* to_receiver,
                                  SimulatedNetworkInterface* sender_network,
                                  PacketTransport* to_sender,
                                  SimulatedNetworkInterface* receiver_network) {
}

// Default transports use an unmodified built-in network behavior config.
BuiltInNetworkBehaviorConfig BaseTest::GetSendTransportConfig() const {
  return BuiltInNetworkBehaviorConfig();
}
BuiltInNetworkBehaviorConfig BaseTest::GetReceiveTransportConfig() const {
  return BuiltInNetworkBehaviorConfig();
}
// Defaults: one video stream, no audio, no FlexFEC.
size_t BaseTest::GetNumVideoStreams() const {
  return 1;
}

size_t BaseTest::GetNumAudioStreams() const {
  return 0;
}

size_t BaseTest::GetNumFlexfecStreams() const {
  return 0;
}

void BaseTest::ModifyVideoConfigs(
    VideoSendStream::Config* send_config,
    std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
    VideoEncoderConfig* encoder_config) {}
+
+void BaseTest::ModifyVideoCaptureStartResolution(int* width,
+ int* heigt,
+ int* frame_rate) {}
+
// Default no-op lifecycle hooks; concrete tests override as needed.
void BaseTest::ModifyVideoDegradationPreference(
    DegradationPreference* degradation_preference) {}

void BaseTest::OnVideoStreamsCreated(
    VideoSendStream* send_stream,
    const std::vector<VideoReceiveStreamInterface*>& receive_streams) {}

void BaseTest::ModifyAudioConfigs(
    AudioSendStream::Config* send_config,
    std::vector<AudioReceiveStreamInterface::Config>* receive_configs) {}

void BaseTest::OnAudioStreamsCreated(
    AudioSendStream* send_stream,
    const std::vector<AudioReceiveStreamInterface*>& receive_streams) {}

void BaseTest::ModifyFlexfecConfigs(
    std::vector<FlexfecReceiveStream::Config>* receive_configs) {}

void BaseTest::OnFlexfecStreamsCreated(
    const std::vector<FlexfecReceiveStream*>& receive_streams) {}

void BaseTest::OnFrameGeneratorCapturerCreated(
    FrameGeneratorCapturer* frame_generator_capturer) {}

void BaseTest::OnStreamsStopped() {}
+
SendTest::SendTest(TimeDelta timeout) : BaseTest(timeout) {}

// Send-only tests: no receiver call or receive streams are created.
bool SendTest::ShouldCreateReceivers() const {
  return false;
}
+
EndToEndTest::EndToEndTest() {}

EndToEndTest::EndToEndTest(TimeDelta timeout) : BaseTest(timeout) {}

// End-to-end tests exercise both the sender and receiver sides.
bool EndToEndTest::ShouldCreateReceivers() const {
  return true;
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/call_test.h b/third_party/libwebrtc/test/call_test.h
new file mode 100644
index 0000000000..3324bc4b5e
--- /dev/null
+++ b/third_party/libwebrtc/test/call_test.h
@@ -0,0 +1,359 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_CALL_TEST_H_
+#define TEST_CALL_TEST_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/simulated_network.h"
+#include "api/test/video/function_video_decoder_factory.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "call/call.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "test/encoder_settings.h"
+#include "test/fake_decoder.h"
+#include "test/fake_videorenderer.h"
+#include "test/fake_vp8_encoder.h"
+#include "test/frame_generator_capturer.h"
+#include "test/rtp_rtcp_observer.h"
+#include "test/run_loop.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+namespace test {
+
+class BaseTest;
+
// Test fixture that owns a full sender/receiver Call pair, their transports,
// stream configs and streams, plus fake audio devices and a frame generator.
// Concrete tests drive it via the protected Create*/Start/Stop helpers.
class CallTest : public ::testing::Test, public RtpPacketSinkInterface {
 public:
  CallTest();
  virtual ~CallTest();

  static constexpr size_t kNumSsrcs = 6;
  static const int kNumSimulcastStreams = 3;
  static const int kDefaultWidth = 320;
  static const int kDefaultHeight = 180;
  static const int kDefaultFramerate = 30;
  static constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(30);
  static constexpr TimeDelta kLongTimeout = TimeDelta::Seconds(120);
  // NOTE(review): this is an unscoped enum whose name fuses the tokens into
  // `classPayloadTypes` (presumably `enum class PayloadTypes` was intended).
  // The enumerators are looked up unscoped as CallTest::kFoo throughout the
  // codebase, so renaming/scoping it would break callers; left as-is.
  enum classPayloadTypes : uint8_t {
    kSendRtxPayloadType = 98,
    kRtxRedPayloadType = 99,
    kVideoSendPayloadType = 100,
    kAudioSendPayloadType = 103,
    kRedPayloadType = 118,
    kUlpfecPayloadType = 119,
    kFlexfecPayloadType = 120,
    kPayloadTypeH264 = 122,
    kPayloadTypeVP8 = 123,
    kPayloadTypeVP9 = 124,
    kPayloadTypeGeneric = 125,
    kFakeVideoSendPayloadType = 126,
  };
  static const uint32_t kSendRtxSsrcs[kNumSsrcs];
  static const uint32_t kVideoSendSsrcs[kNumSsrcs];
  static const uint32_t kAudioSendSsrc;
  static const uint32_t kFlexfecSendSsrc;
  static const uint32_t kReceiverLocalVideoSsrc;
  static const uint32_t kReceiverLocalAudioSsrc;
  static const int kNackRtpHistoryMs;
  // Payload-type -> media-type table shared by the test transports.
  static const std::map<uint8_t, MediaType> payload_type_map_;

 protected:
  void RegisterRtpExtension(const RtpExtension& extension);
  // Returns header extensions that can be parsed by the transport.
  rtc::ArrayView<const RtpExtension> GetRegisteredExtensions() {
    return rtp_extensions_;
  }

  // RunBaseTest overwrites the audio_state of the send and receive Call configs
  // to simplify test code.
  void RunBaseTest(BaseTest* test);

  // Call creation. The no-arg overloads use default configs.
  void CreateCalls();
  void CreateCalls(const Call::Config& sender_config,
                   const Call::Config& receiver_config);
  void CreateSenderCall();
  void CreateSenderCall(const Call::Config& config);
  void CreateReceiverCall(const Call::Config& config);
  void DestroyCalls();

  // Send-side config helpers.
  void CreateVideoSendConfig(VideoSendStream::Config* video_config,
                             size_t num_video_streams,
                             size_t num_used_ssrcs,
                             Transport* send_transport);
  void CreateAudioAndFecSendConfigs(size_t num_audio_streams,
                                    size_t num_flexfec_streams,
                                    Transport* send_transport);
  void SetAudioConfig(const AudioSendStream::Config& config);

  void SetSendFecConfig(std::vector<uint32_t> video_send_ssrcs);
  void SetSendUlpFecConfig(VideoSendStream::Config* send_config);
  void SetReceiveUlpFecConfig(
      VideoReceiveStreamInterface::Config* receive_config);

  void CreateSendConfig(size_t num_video_streams,
                        size_t num_audio_streams,
                        size_t num_flexfec_streams) {
    CreateSendConfig(num_video_streams, num_audio_streams, num_flexfec_streams,
                     send_transport_.get());
  }
  void CreateSendConfig(size_t num_video_streams,
                        size_t num_audio_streams,
                        size_t num_flexfec_streams,
                        Transport* send_transport);

  // Receive-side config helpers, derived from the matching send configs.
  void CreateMatchingVideoReceiveConfigs(
      const VideoSendStream::Config& video_send_config) {
    CreateMatchingVideoReceiveConfigs(video_send_config,
                                      receive_transport_.get());
  }
  void CreateMatchingVideoReceiveConfigs(
      const VideoSendStream::Config& video_send_config,
      Transport* rtcp_send_transport);
  void CreateMatchingVideoReceiveConfigs(
      const VideoSendStream::Config& video_send_config,
      Transport* rtcp_send_transport,
      VideoDecoderFactory* decoder_factory,
      absl::optional<size_t> decode_sub_stream,
      bool receiver_reference_time_report,
      int rtp_history_ms);
  void AddMatchingVideoReceiveConfigs(
      std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
      const VideoSendStream::Config& video_send_config,
      Transport* rtcp_send_transport,
      VideoDecoderFactory* decoder_factory,
      absl::optional<size_t> decode_sub_stream,
      bool receiver_reference_time_report,
      int rtp_history_ms);

  void CreateMatchingAudioAndFecConfigs(Transport* rtcp_send_transport);
  void CreateMatchingAudioConfigs(Transport* transport, std::string sync_group);
  static AudioReceiveStreamInterface::Config CreateMatchingAudioConfig(
      const AudioSendStream::Config& send_config,
      rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
      Transport* transport,
      std::string sync_group);
  void CreateMatchingFecConfig(
      Transport* transport,
      const VideoSendStream::Config& video_send_config);
  void CreateMatchingReceiveConfigs() {
    CreateMatchingReceiveConfigs(receive_transport_.get());
  }
  void CreateMatchingReceiveConfigs(Transport* rtcp_send_transport);

  // Frame generation / fake audio devices.
  void CreateFrameGeneratorCapturerWithDrift(Clock* drift_clock,
                                             float speed,
                                             int framerate,
                                             int width,
                                             int height);
  void CreateFrameGeneratorCapturer(int framerate, int width, int height);
  void CreateFakeAudioDevices(
      std::unique_ptr<TestAudioDeviceModule::Capturer> capturer,
      std::unique_ptr<TestAudioDeviceModule::Renderer> renderer);

  // Stream creation from the configs registered above.
  void CreateVideoStreams();
  void CreateVideoSendStreams();
  void CreateVideoSendStream(const VideoEncoderConfig& encoder_config);
  void CreateAudioStreams();
  void CreateFlexfecStreams();

  // Receiver call must be created before calling CreateSendTransport in order
  // to set a receiver.
  // Rtp header extensions must be registered (RegisterRtpExtension(..)) before
  // the transport is created in order for the receiving call object receive RTP
  // packets with extensions.
  void CreateSendTransport(const BuiltInNetworkBehaviorConfig& config,
                           RtpRtcpObserver* observer);
  void CreateReceiveTransport(const BuiltInNetworkBehaviorConfig& config,
                              RtpRtcpObserver* observer);

  void ConnectVideoSourcesToStreams();

  // Stream lifecycle.
  void Start();
  void StartVideoStreams();
  void Stop();
  void StopVideoStreams();
  void DestroyStreams();
  void DestroyVideoSendStreams();
  void SetFakeVideoCaptureRotation(VideoRotation rotation);

  void SetVideoDegradation(DegradationPreference preference);

  // Accessors for the first (default) stream/config.
  VideoSendStream::Config* GetVideoSendConfig();
  void SetVideoSendConfig(const VideoSendStream::Config& config);
  VideoEncoderConfig* GetVideoEncoderConfig();
  void SetVideoEncoderConfig(const VideoEncoderConfig& config);
  VideoSendStream* GetVideoSendStream();
  FlexfecReceiveStream::Config* GetFlexFecConfig();
  TaskQueueBase* task_queue() { return task_queue_.get(); }

  // RtpPacketSinkInterface implementation.
  void OnRtpPacket(const RtpPacketReceived& packet) override;

  test::RunLoop loop_;

  Clock* const clock_;
  test::ScopedKeyValueConfig field_trials_;

  std::unique_ptr<TaskQueueFactory> task_queue_factory_;
  std::unique_ptr<webrtc::RtcEventLog> send_event_log_;
  std::unique_ptr<webrtc::RtcEventLog> recv_event_log_;
  std::unique_ptr<Call> sender_call_;
  std::unique_ptr<PacketTransport> send_transport_;
  SimulatedNetworkInterface* send_simulated_network_ = nullptr;
  std::vector<VideoSendStream::Config> video_send_configs_;
  std::vector<VideoEncoderConfig> video_encoder_configs_;
  std::vector<VideoSendStream*> video_send_streams_;
  AudioSendStream::Config audio_send_config_;
  AudioSendStream* audio_send_stream_;

  std::unique_ptr<Call> receiver_call_;
  std::unique_ptr<PacketTransport> receive_transport_;
  SimulatedNetworkInterface* receive_simulated_network_ = nullptr;
  std::vector<VideoReceiveStreamInterface::Config> video_receive_configs_;
  std::vector<VideoReceiveStreamInterface*> video_receive_streams_;
  std::vector<AudioReceiveStreamInterface::Config> audio_receive_configs_;
  std::vector<AudioReceiveStreamInterface*> audio_receive_streams_;
  std::vector<FlexfecReceiveStream::Config> flexfec_receive_configs_;
  std::vector<FlexfecReceiveStream*> flexfec_receive_streams_;

  test::FrameGeneratorCapturer* frame_generator_capturer_;
  std::vector<std::unique_ptr<rtc::VideoSourceInterface<VideoFrame>>>
      video_sources_;
  DegradationPreference degradation_preference_ =
      DegradationPreference::MAINTAIN_FRAMERATE;

  std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory_;
  std::unique_ptr<NetworkStatePredictorFactoryInterface>
      network_state_predictor_factory_;
  std::unique_ptr<NetworkControllerFactoryInterface>
      network_controller_factory_;

  test::FunctionVideoEncoderFactory fake_encoder_factory_;
  int fake_encoder_max_bitrate_ = -1;
  test::FunctionVideoDecoderFactory fake_decoder_factory_;
  std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
  // Number of simulcast substreams.
  size_t num_video_streams_;
  size_t num_audio_streams_;
  size_t num_flexfec_streams_;
  rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory_;
  rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory_;
  test::FakeVideoRenderer fake_renderer_;


 private:
  absl::optional<RtpExtension> GetRtpExtensionByUri(
      const std::string& uri) const;

  void AddRtpExtensionByUri(const std::string& uri,
                            std::vector<RtpExtension>* extensions) const;

  std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
  std::vector<RtpExtension> rtp_extensions_;
  rtc::scoped_refptr<AudioProcessing> apm_send_;
  rtc::scoped_refptr<AudioProcessing> apm_recv_;
  rtc::scoped_refptr<TestAudioDeviceModule> fake_send_audio_device_;
  rtc::scoped_refptr<TestAudioDeviceModule> fake_recv_audio_device_;
};
+
+class BaseTest : public RtpRtcpObserver {
+ public:
+ BaseTest();
+ explicit BaseTest(TimeDelta timeout);
+ virtual ~BaseTest();
+
+ virtual void PerformTest() = 0;
+ virtual bool ShouldCreateReceivers() const = 0;
+
+ virtual size_t GetNumVideoStreams() const;
+ virtual size_t GetNumAudioStreams() const;
+ virtual size_t GetNumFlexfecStreams() const;
+
+ virtual std::unique_ptr<TestAudioDeviceModule::Capturer> CreateCapturer();
+ virtual std::unique_ptr<TestAudioDeviceModule::Renderer> CreateRenderer();
+ virtual void OnFakeAudioDevicesCreated(
+ TestAudioDeviceModule* send_audio_device,
+ TestAudioDeviceModule* recv_audio_device);
+
+ virtual void ModifySenderBitrateConfig(BitrateConstraints* bitrate_config);
+ virtual void ModifyReceiverBitrateConfig(BitrateConstraints* bitrate_config);
+
+ virtual void OnCallsCreated(Call* sender_call, Call* receiver_call);
+ virtual void OnTransportCreated(PacketTransport* to_receiver,
+ SimulatedNetworkInterface* sender_network,
+ PacketTransport* to_sender,
+ SimulatedNetworkInterface* receiver_network);
+
+ virtual BuiltInNetworkBehaviorConfig GetSendTransportConfig() const;
+ virtual BuiltInNetworkBehaviorConfig GetReceiveTransportConfig() const;
+
+ virtual void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStreamInterface::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config);
+ virtual void ModifyVideoCaptureStartResolution(int* width,
+ int* heigt,
+ int* frame_rate);
+ virtual void ModifyVideoDegradationPreference(
+ DegradationPreference* degradation_preference);
+
+ virtual void OnVideoStreamsCreated(
+ VideoSendStream* send_stream,
+ const std::vector<VideoReceiveStreamInterface*>& receive_streams);
+
+ virtual void ModifyAudioConfigs(
+ AudioSendStream::Config* send_config,
+ std::vector<AudioReceiveStreamInterface::Config>* receive_configs);
+ virtual void OnAudioStreamsCreated(
+ AudioSendStream* send_stream,
+ const std::vector<AudioReceiveStreamInterface*>& receive_streams);
+
+ virtual void ModifyFlexfecConfigs(
+ std::vector<FlexfecReceiveStream::Config>* receive_configs);
+ virtual void OnFlexfecStreamsCreated(
+ const std::vector<FlexfecReceiveStream*>& receive_streams);
+
+ virtual void OnFrameGeneratorCapturerCreated(
+ FrameGeneratorCapturer* frame_generator_capturer);
+
+ virtual void OnStreamsStopped();
+};
+
// Convenience base for sender-only tests (no receiver call is created).
class SendTest : public BaseTest {
 public:
  explicit SendTest(TimeDelta timeout);

  bool ShouldCreateReceivers() const override;
};
+
// Convenience base for tests exercising both sender and receiver sides.
class EndToEndTest : public BaseTest {
 public:
  EndToEndTest();
  explicit EndToEndTest(TimeDelta timeout);

  bool ShouldCreateReceivers() const override;
};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_CALL_TEST_H_
diff --git a/third_party/libwebrtc/test/configurable_frame_size_encoder.cc b/third_party/libwebrtc/test/configurable_frame_size_encoder.cc
new file mode 100644
index 0000000000..e3965ef770
--- /dev/null
+++ b/third_party/libwebrtc/test/configurable_frame_size_encoder.cc
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/configurable_frame_size_encoder.h"
+
+#include <string.h>
+
+#include <cstdint>
+#include <type_traits>
+#include <utility>
+
+#include "api/video/encoded_image.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+ConfigurableFrameSizeEncoder::ConfigurableFrameSizeEncoder(
+ size_t max_frame_size)
+ : callback_(NULL),
+ current_frame_size_(max_frame_size),
+ codec_type_(kVideoCodecGeneric) {}
+
+ConfigurableFrameSizeEncoder::~ConfigurableFrameSizeEncoder() {}
+
+void ConfigurableFrameSizeEncoder::SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) {
+ // Ignored.
+}
+
// Codec settings are irrelevant for this fake encoder; always succeeds.
int32_t ConfigurableFrameSizeEncoder::InitEncode(
    const VideoCodec* codec_settings,
    const Settings& settings) {
  return WEBRTC_VIDEO_CODEC_OK;
}
+
+int32_t ConfigurableFrameSizeEncoder::Encode(
+ const VideoFrame& inputImage,
+ const std::vector<VideoFrameType>* frame_types) {
+ EncodedImage encodedImage;
+ auto buffer = EncodedImageBuffer::Create(current_frame_size_);
+ memset(buffer->data(), 0, current_frame_size_);
+ encodedImage.SetEncodedData(buffer);
+ encodedImage._encodedHeight = inputImage.height();
+ encodedImage._encodedWidth = inputImage.width();
+ encodedImage._frameType = VideoFrameType::kVideoFrameKey;
+ encodedImage.SetTimestamp(inputImage.timestamp());
+ encodedImage.capture_time_ms_ = inputImage.render_time_ms();
+ CodecSpecificInfo specific{};
+ specific.codecType = codec_type_;
+ callback_->OnEncodedImage(encodedImage, &specific);
+ if (post_encode_callback_) {
+ (*post_encode_callback_)();
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
// Stores the callback that will receive each encoded frame.
int32_t ConfigurableFrameSizeEncoder::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Nothing to release; the encoder holds no external resources.
int32_t ConfigurableFrameSizeEncoder::Release() {
  return WEBRTC_VIDEO_CODEC_OK;
}

// Rate control is ignored; the output size is fixed via SetFrameSize().
void ConfigurableFrameSizeEncoder::SetRates(
    const RateControlParameters& parameters) {}

// Sets the size, in bytes, of every subsequently produced frame.
int32_t ConfigurableFrameSizeEncoder::SetFrameSize(size_t size) {
  current_frame_size_ = size;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Sets the codec type reported in CodecSpecificInfo for produced frames.
void ConfigurableFrameSizeEncoder::SetCodecType(VideoCodecType codec_type) {
  codec_type_ = codec_type;
}

// Registers a hook invoked after each Encode() delivers its frame.
void ConfigurableFrameSizeEncoder::RegisterPostEncodeCallback(
    std::function<void(void)> post_encode_callback) {
  post_encode_callback_ = std::move(post_encode_callback);
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/configurable_frame_size_encoder.h b/third_party/libwebrtc/test/configurable_frame_size_encoder.h
new file mode 100644
index 0000000000..8dd5157b5b
--- /dev/null
+++ b/third_party/libwebrtc/test/configurable_frame_size_encoder.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_
+#define TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+namespace test {
+
+class ConfigurableFrameSizeEncoder : public VideoEncoder {
+ public:
+ explicit ConfigurableFrameSizeEncoder(size_t max_frame_size);
+ ~ConfigurableFrameSizeEncoder() override;
+
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override;
+
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) override;
+
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ void SetRates(const RateControlParameters& parameters) override;
+
+ int32_t SetFrameSize(size_t size);
+
+ void SetCodecType(VideoCodecType codec_type_);
+
+ void RegisterPostEncodeCallback(
+ std::function<void(void)> post_encode_callback);
+
+ private:
+ EncodedImageCallback* callback_;
+ absl::optional<std::function<void(void)>> post_encode_callback_;
+
+ size_t current_frame_size_;
+ VideoCodecType codec_type_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_
diff --git a/third_party/libwebrtc/test/direct_transport.cc b/third_party/libwebrtc/test/direct_transport.cc
new file mode 100644
index 0000000000..260497947c
--- /dev/null
+++ b/third_party/libwebrtc/test/direct_transport.cc
@@ -0,0 +1,145 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/direct_transport.h"
+
+#include "api/media_types.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "call/call.h"
+#include "call/fake_network_pipe.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+namespace test {
+
// Copies the payload-type -> media-type table supplied by the test.
Demuxer::Demuxer(const std::map<uint8_t, MediaType>& payload_type_map)
    : payload_type_map_(payload_type_map) {}
+
+MediaType Demuxer::GetMediaType(const uint8_t* packet_data,
+ const size_t packet_length) const {
+ if (IsRtpPacket(rtc::MakeArrayView(packet_data, packet_length))) {
+ RTC_CHECK_GE(packet_length, 2);
+ const uint8_t payload_type = packet_data[1] & 0x7f;
+ std::map<uint8_t, MediaType>::const_iterator it =
+ payload_type_map_.find(payload_type);
+ RTC_CHECK(it != payload_type_map_.end())
+ << "payload type " << static_cast<int>(payload_type) << " unknown.";
+ return it->second;
+ }
+ return MediaType::ANY;
+}
+
// Wires the transport to the fake network pipe and (optionally) a send call,
// then immediately signals the network as up via Start().
DirectTransport::DirectTransport(
    TaskQueueBase* task_queue,
    std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
    Call* send_call,
    const std::map<uint8_t, MediaType>& payload_type_map,
    rtc::ArrayView<const RtpExtension> audio_extensions,
    rtc::ArrayView<const RtpExtension> video_extensions)
    : send_call_(send_call),
      task_queue_(task_queue),
      demuxer_(payload_type_map),
      fake_network_(std::move(pipe)),
      audio_extensions_(audio_extensions),
      video_extensions_(video_extensions) {
  Start();
}

// Cancels the pending pipe-processing task before members are destroyed.
DirectTransport::~DirectTransport() {
  next_process_task_.Stop();
}

// Sets the receiver at the far end of the fake network pipe.
void DirectTransport::SetReceiver(PacketReceiver* receiver) {
  fake_network_->SetReceiver(receiver);
}
+
+bool DirectTransport::SendRtp(const uint8_t* data,
+ size_t length,
+ const PacketOptions& options) {
+ if (send_call_) {
+ rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis());
+ sent_packet.info.included_in_feedback = options.included_in_feedback;
+ sent_packet.info.included_in_allocation = options.included_in_allocation;
+ sent_packet.info.packet_size_bytes = length;
+ sent_packet.info.packet_type = rtc::PacketType::kData;
+ send_call_->OnSentPacket(sent_packet);
+ }
+
+ const RtpHeaderExtensionMap* extensions = nullptr;
+ MediaType media_type = demuxer_.GetMediaType(data, length);
+ switch (demuxer_.GetMediaType(data, length)) {
+ case webrtc::MediaType::AUDIO:
+ extensions = &audio_extensions_;
+ break;
+ case webrtc::MediaType::VIDEO:
+ extensions = &video_extensions_;
+ break;
+ default:
+ RTC_CHECK_NOTREACHED();
+ }
+ RtpPacketReceived packet(extensions, Timestamp::Micros(rtc::TimeMicros()));
+ if (media_type == MediaType::VIDEO) {
+ packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
+ }
+ RTC_CHECK(packet.Parse(rtc::CopyOnWriteBuffer(data, length)));
+ fake_network_->DeliverRtpPacket(
+ media_type, std::move(packet),
+ [](const RtpPacketReceived& packet) { return false; });
+
+ MutexLock lock(&process_lock_);
+ if (!next_process_task_.Running())
+ ProcessPackets();
+ return true;
+}
+
// Pushes an RTCP packet into the fake network pipe and ensures the pipe's
// processing task is running.
bool DirectTransport::SendRtcp(const uint8_t* data, size_t length) {
  fake_network_->DeliverRtcpPacket(rtc::CopyOnWriteBuffer(data, length));
  MutexLock lock(&process_lock_);
  if (!next_process_task_.Running())
    ProcessPackets();
  return true;
}

// Average transit delay reported by the fake network, in milliseconds.
int DirectTransport::GetAverageDelayMs() {
  return fake_network_->AverageDelay();
}
+
// Marks audio and video channels as up on the send call. Called from the
// constructor.
void DirectTransport::Start() {
  RTC_DCHECK(task_queue_);
  if (send_call_) {
    send_call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
    send_call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
  }
}

// Schedules a repeating task that drains the fake network pipe, re-arming
// itself with the pipe's next-process delay. The task stops itself (under
// process_lock_) once the pipe reports no pending work; SendRtp/SendRtcp
// restart it when new packets arrive.
void DirectTransport::ProcessPackets() {
  absl::optional<int64_t> initial_delay_ms =
      fake_network_->TimeUntilNextProcess();
  if (initial_delay_ms == absl::nullopt)
    return;

  next_process_task_ = RepeatingTaskHandle::DelayedStart(
      task_queue_, TimeDelta::Millis(*initial_delay_ms), [this] {
        fake_network_->Process();
        if (auto delay_ms = fake_network_->TimeUntilNextProcess())
          return TimeDelta::Millis(*delay_ms);
        // Otherwise stop the task.
        MutexLock lock(&process_lock_);
        next_process_task_.Stop();
        // Since this task is stopped, return value doesn't matter.
        return TimeDelta::Zero();
      });
}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/direct_transport.h b/third_party/libwebrtc/test/direct_transport.h
new file mode 100644
index 0000000000..468e339c0a
--- /dev/null
+++ b/third_party/libwebrtc/test/direct_transport.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_DIRECT_TRANSPORT_H_
+#define TEST_DIRECT_TRANSPORT_H_
+
+#include <memory>
+
+#include "api/call/transport.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/simulated_network.h"
+#include "call/call.h"
+#include "call/simulated_packet_receiver.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+
+class PacketReceiver;
+
+namespace test {
+class Demuxer {
+ public:
+ explicit Demuxer(const std::map<uint8_t, MediaType>& payload_type_map);
+ ~Demuxer() = default;
+
+ Demuxer(const Demuxer&) = delete;
+ Demuxer& operator=(const Demuxer&) = delete;
+
+ MediaType GetMediaType(const uint8_t* packet_data,
+ size_t packet_length) const;
+ const std::map<uint8_t, MediaType> payload_type_map_;
+};
+
+// Objects of this class are expected to be allocated and destroyed on the
+// same task-queue - the one that's passed in via the constructor.
+class DirectTransport : public Transport {
+ public:
+ DirectTransport(TaskQueueBase* task_queue,
+ std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
+ Call* send_call,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions);
+
+ ~DirectTransport() override;
+
+ // TODO(holmer): Look into moving this to the constructor.
+ virtual void SetReceiver(PacketReceiver* receiver);
+
+ bool SendRtp(const uint8_t* data,
+ size_t length,
+ const PacketOptions& options) override;
+ bool SendRtcp(const uint8_t* data, size_t length) override;
+
+ int GetAverageDelayMs();
+
+ private:
+ void ProcessPackets() RTC_EXCLUSIVE_LOCKS_REQUIRED(&process_lock_);
+ void LegacySendPacket(const uint8_t* data, size_t length);
+ void Start();
+
+ Call* const send_call_;
+
+ TaskQueueBase* const task_queue_;
+
+ Mutex process_lock_;
+ RepeatingTaskHandle next_process_task_ RTC_GUARDED_BY(&process_lock_);
+
+ const Demuxer demuxer_;
+ const std::unique_ptr<SimulatedPacketReceiverInterface> fake_network_;
+ const RtpHeaderExtensionMap audio_extensions_;
+ const RtpHeaderExtensionMap video_extensions_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_DIRECT_TRANSPORT_H_
diff --git a/third_party/libwebrtc/test/direct_transport_unittest.cc b/third_party/libwebrtc/test/direct_transport_unittest.cc
new file mode 100644
index 0000000000..ab00971089
--- /dev/null
+++ b/third_party/libwebrtc/test/direct_transport_unittest.cc
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/direct_transport.h"
+
+#include <string.h>
+
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+TEST(DemuxerTest, Demuxing) {
+ constexpr uint8_t kVideoPayloadType = 100;
+ constexpr uint8_t kAudioPayloadType = 101;
+ constexpr size_t kPacketSize = 12;
+ Demuxer demuxer({{kVideoPayloadType, MediaType::VIDEO},
+ {kAudioPayloadType, MediaType::AUDIO}});
+
+ uint8_t data[kPacketSize];
+ memset(data, 0, kPacketSize);
+ data[0] = 0x80;
+ data[1] = kVideoPayloadType;
+ EXPECT_EQ(demuxer.GetMediaType(data, kPacketSize), MediaType::VIDEO);
+ data[1] = kAudioPayloadType;
+ EXPECT_EQ(demuxer.GetMediaType(data, kPacketSize), MediaType::AUDIO);
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/drifting_clock.cc b/third_party/libwebrtc/test/drifting_clock.cc
new file mode 100644
index 0000000000..47c8e56916
--- /dev/null
+++ b/third_party/libwebrtc/test/drifting_clock.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/drifting_clock.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+constexpr float DriftingClock::kNoDrift;
+
+DriftingClock::DriftingClock(Clock* clock, float speed)
+ : clock_(clock), drift_(speed - 1.0f), start_time_(clock_->CurrentTime()) {
+ RTC_CHECK(clock);
+ RTC_CHECK_GT(speed, 0.0f);
+}
+
+TimeDelta DriftingClock::Drift() const {
+ auto now = clock_->CurrentTime();
+ RTC_DCHECK_GE(now, start_time_);
+ return (now - start_time_) * drift_;
+}
+
+Timestamp DriftingClock::Drift(Timestamp timestamp) const {
+ return timestamp + Drift() / 1000.;
+}
+
+NtpTime DriftingClock::Drift(NtpTime ntp_time) const {
+ // NTP precision is 1/2^32 seconds, i.e. 2^32 ntp fractions = 1 second.
+ const double kNtpFracPerMicroSecond = 4294.967296; // = 2^32 / 10^6
+
+ uint64_t total_fractions = static_cast<uint64_t>(ntp_time);
+ total_fractions += Drift().us() * kNtpFracPerMicroSecond;
+ return NtpTime(total_fractions);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/drifting_clock.h b/third_party/libwebrtc/test/drifting_clock.h
new file mode 100644
index 0000000000..4a2500ba30
--- /dev/null
+++ b/third_party/libwebrtc/test/drifting_clock.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_DRIFTING_CLOCK_H_
+#define TEST_DRIFTING_CLOCK_H_
+
+#include <stdint.h>
+
+#include "system_wrappers/include/clock.h"
+#include "system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+namespace test {
+class DriftingClock : public Clock {
+ public:
+ static constexpr float kNoDrift = 1.0f;
+
+ DriftingClock(Clock* clock, float speed);
+
+ static constexpr float PercentsFaster(float percent) {
+ return 1.0f + percent / 100.0f;
+ }
+ static constexpr float PercentsSlower(float percent) {
+ return 1.0f - percent / 100.0f;
+ }
+
+ Timestamp CurrentTime() override { return Drift(clock_->CurrentTime()); }
+ NtpTime ConvertTimestampToNtpTime(Timestamp timestamp) override {
+ return Drift(clock_->ConvertTimestampToNtpTime(timestamp));
+ }
+
+ private:
+ TimeDelta Drift() const;
+ Timestamp Drift(Timestamp timestamp) const;
+ NtpTime Drift(NtpTime ntp_time) const;
+
+ Clock* const clock_;
+ const float drift_;
+ const Timestamp start_time_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_DRIFTING_CLOCK_H_
diff --git a/third_party/libwebrtc/test/encoder_settings.cc b/third_party/libwebrtc/test/encoder_settings.cc
new file mode 100644
index 0000000000..f5b298b107
--- /dev/null
+++ b/third_party/libwebrtc/test/encoder_settings.cc
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/encoder_settings.h"
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "call/rtp_config.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+const size_t DefaultVideoStreamFactory::kMaxNumberOfStreams;
+const int DefaultVideoStreamFactory::kMaxBitratePerStream[] = {150000, 450000,
+ 1500000};
+const int DefaultVideoStreamFactory::kDefaultMinBitratePerStream[] = {
+ 30000, 200000, 700000};
+
+// static
+std::vector<VideoStream> CreateVideoStreams(
+ int width,
+ int height,
+ const webrtc::VideoEncoderConfig& encoder_config) {
+ RTC_DCHECK(encoder_config.number_of_streams <=
+ DefaultVideoStreamFactory::kMaxNumberOfStreams);
+
+ std::vector<VideoStream> stream_settings(encoder_config.number_of_streams);
+
+ int bitrate_left_bps = 0;
+ if (encoder_config.max_bitrate_bps > 0) {
+ bitrate_left_bps = encoder_config.max_bitrate_bps;
+ } else {
+ for (size_t stream_num = 0; stream_num < encoder_config.number_of_streams;
+ ++stream_num) {
+ bitrate_left_bps +=
+ DefaultVideoStreamFactory::kMaxBitratePerStream[stream_num];
+ }
+ }
+
+ for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+ stream_settings[i].width =
+ (i + 1) * width / encoder_config.number_of_streams;
+ stream_settings[i].height =
+ (i + 1) * height / encoder_config.number_of_streams;
+ stream_settings[i].max_framerate = 30;
+ stream_settings[i].max_qp = 56;
+ stream_settings[i].min_bitrate_bps =
+ DefaultVideoStreamFactory::kDefaultMinBitratePerStream[i];
+
+ // Use configured values instead of default values if set.
+ const VideoStream stream = (i < encoder_config.simulcast_layers.size())
+ ? encoder_config.simulcast_layers[i]
+ : VideoStream();
+
+ int max_bitrate_bps =
+ stream.max_bitrate_bps > 0
+ ? stream.max_bitrate_bps
+ : DefaultVideoStreamFactory::kMaxBitratePerStream[i];
+ max_bitrate_bps = std::min(bitrate_left_bps, max_bitrate_bps);
+
+ int target_bitrate_bps = stream.target_bitrate_bps > 0
+ ? stream.target_bitrate_bps
+ : max_bitrate_bps;
+ target_bitrate_bps = std::min(max_bitrate_bps, target_bitrate_bps);
+
+ if (stream.min_bitrate_bps > 0) {
+ RTC_DCHECK_LE(stream.min_bitrate_bps, target_bitrate_bps);
+ stream_settings[i].min_bitrate_bps = stream.min_bitrate_bps;
+ }
+ if (stream.max_framerate > 0) {
+ stream_settings[i].max_framerate = stream.max_framerate;
+ }
+ if (stream.num_temporal_layers) {
+ RTC_DCHECK_GE(*stream.num_temporal_layers, 1);
+ stream_settings[i].num_temporal_layers = stream.num_temporal_layers;
+ }
+ if (stream.scale_resolution_down_by >= 1.0) {
+ stream_settings[i].width = width / stream.scale_resolution_down_by;
+ stream_settings[i].height = height / stream.scale_resolution_down_by;
+ }
+ stream_settings[i].scalability_mode = stream.scalability_mode;
+ stream_settings[i].target_bitrate_bps = target_bitrate_bps;
+ stream_settings[i].max_bitrate_bps = max_bitrate_bps;
+ stream_settings[i].active =
+ encoder_config.number_of_streams == 1 || stream.active;
+
+ bitrate_left_bps -= stream_settings[i].target_bitrate_bps;
+ }
+
+ stream_settings[encoder_config.number_of_streams - 1].max_bitrate_bps +=
+ bitrate_left_bps;
+ stream_settings[0].bitrate_priority = encoder_config.bitrate_priority;
+
+ return stream_settings;
+}
+
+DefaultVideoStreamFactory::DefaultVideoStreamFactory() {}
+
+std::vector<VideoStream> DefaultVideoStreamFactory::CreateEncoderStreams(
+ int frame_width,
+ int frame_height,
+ const webrtc::VideoEncoderConfig& encoder_config) {
+ return CreateVideoStreams(frame_width, frame_height, encoder_config);
+}
+
+void FillEncoderConfiguration(VideoCodecType codec_type,
+ size_t num_streams,
+ VideoEncoderConfig* configuration) {
+ RTC_DCHECK_LE(num_streams, DefaultVideoStreamFactory::kMaxNumberOfStreams);
+
+ configuration->codec_type = codec_type;
+ configuration->number_of_streams = num_streams;
+ configuration->video_stream_factory =
+ rtc::make_ref_counted<DefaultVideoStreamFactory>();
+ configuration->max_bitrate_bps = 0;
+ configuration->frame_drop_enabled = true;
+ configuration->simulcast_layers = std::vector<VideoStream>(num_streams);
+ for (size_t i = 0; i < num_streams; ++i) {
+ configuration->max_bitrate_bps +=
+ DefaultVideoStreamFactory::kMaxBitratePerStream[i];
+ }
+}
+
+VideoReceiveStreamInterface::Decoder CreateMatchingDecoder(
+ int payload_type,
+ const std::string& payload_name) {
+ VideoReceiveStreamInterface::Decoder decoder;
+ decoder.payload_type = payload_type;
+ decoder.video_format = SdpVideoFormat(payload_name);
+ return decoder;
+}
+
+VideoReceiveStreamInterface::Decoder CreateMatchingDecoder(
+ const VideoSendStream::Config& config) {
+ return CreateMatchingDecoder(config.rtp.payload_type,
+ config.rtp.payload_name);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/encoder_settings.h b/third_party/libwebrtc/test/encoder_settings.h
new file mode 100644
index 0000000000..6dbad0fee2
--- /dev/null
+++ b/third_party/libwebrtc/test/encoder_settings.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_ENCODER_SETTINGS_H_
+#define TEST_ENCODER_SETTINGS_H_
+
+#include <stddef.h>
+
+#include <string>
+#include <vector>
+
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "video/config/video_encoder_config.h"
+
+namespace webrtc {
+namespace test {
+
+class DefaultVideoStreamFactory
+ : public VideoEncoderConfig::VideoStreamFactoryInterface {
+ public:
+ DefaultVideoStreamFactory();
+
+ static const size_t kMaxNumberOfStreams = 3;
+ // Defined as {150000, 450000, 1500000};
+ static const int kMaxBitratePerStream[];
+  // Defined as {30000, 200000, 700000};
+ static const int kDefaultMinBitratePerStream[];
+
+ private:
+ std::vector<VideoStream> CreateEncoderStreams(
+ int frame_width,
+ int frame_height,
+ const webrtc::VideoEncoderConfig& encoder_config) override;
+};
+
+// Creates `encoder_config.number_of_streams` VideoStreams, where the stream
+// at index `encoder_config.number_of_streams - 1` has width = `width` and
+// height = `height`. The total max bitrate of all VideoStreams is
+// `encoder_config.max_bitrate_bps`.
+std::vector<VideoStream> CreateVideoStreams(
+ int width,
+ int height,
+ const webrtc::VideoEncoderConfig& encoder_config);
+
+void FillEncoderConfiguration(VideoCodecType codec_type,
+ size_t num_streams,
+ VideoEncoderConfig* configuration);
+
+VideoReceiveStreamInterface::Decoder CreateMatchingDecoder(
+ int payload_type,
+ const std::string& payload_name);
+
+VideoReceiveStreamInterface::Decoder CreateMatchingDecoder(
+ const VideoSendStream::Config& config);
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_ENCODER_SETTINGS_H_
diff --git a/third_party/libwebrtc/test/explicit_key_value_config.cc b/third_party/libwebrtc/test/explicit_key_value_config.cc
new file mode 100644
index 0000000000..90690c0514
--- /dev/null
+++ b/third_party/libwebrtc/test/explicit_key_value_config.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/explicit_key_value_config.h"
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+ExplicitKeyValueConfig::ExplicitKeyValueConfig(absl::string_view s) {
+ std::string::size_type field_start = 0;
+ while (field_start < s.size()) {
+ std::string::size_type separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing separator '/' after field trial key.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial key cannot be empty.";
+ std::string key(s.substr(field_start, separator_pos - field_start));
+ field_start = separator_pos + 1;
+
+ RTC_CHECK_LT(field_start, s.size())
+ << "Missing value after field trial key. String ended.";
+ separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing terminating '/' in field trial string.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial value cannot be empty.";
+ std::string value(s.substr(field_start, separator_pos - field_start));
+ field_start = separator_pos + 1;
+
+ key_value_map_[key] = value;
+ }
+ // This check is technically redundant due to earlier checks.
+ // We nevertheless keep the check to make it clear that the entire
+ // string has been processed, and without indexing past the end.
+ RTC_CHECK_EQ(field_start, s.size());
+}
+
+std::string ExplicitKeyValueConfig::GetValue(absl::string_view key) const {
+ auto it = key_value_map_.find(key);
+ if (it != key_value_map_.end())
+ return it->second;
+ return "";
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/explicit_key_value_config.h b/third_party/libwebrtc/test/explicit_key_value_config.h
new file mode 100644
index 0000000000..f14a10432c
--- /dev/null
+++ b/third_party/libwebrtc/test/explicit_key_value_config.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+#define TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
+
+#include <functional>
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/field_trials_registry.h"
+
+namespace webrtc {
+namespace test {
+
+class ExplicitKeyValueConfig : public FieldTrialsRegistry {
+ public:
+ explicit ExplicitKeyValueConfig(absl::string_view s);
+
+ private:
+ std::string GetValue(absl::string_view key) const override;
+
+ // Unlike std::less<std::string>, std::less<> is transparent and allows
+ // heterogeneous lookup directly with absl::string_view.
+ std::map<std::string, std::string, std::less<>> key_value_map_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_EXPLICIT_KEY_VALUE_CONFIG_H_
diff --git a/third_party/libwebrtc/test/fake_decoder.cc b/third_party/libwebrtc/test/fake_decoder.cc
new file mode 100644
index 0000000000..53fce37de1
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_decoder.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_decoder.h"
+
+#include <string.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+namespace test {
+
+FakeDecoder::FakeDecoder() : FakeDecoder(nullptr) {}
+
+FakeDecoder::FakeDecoder(TaskQueueFactory* task_queue_factory)
+ : callback_(nullptr),
+ width_(kDefaultWidth),
+ height_(kDefaultHeight),
+ task_queue_factory_(task_queue_factory),
+ decode_delay_ms_(0) {}
+
+bool FakeDecoder::Configure(const Settings& settings) {
+ return true;
+}
+
+int32_t FakeDecoder::Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ if (input._encodedWidth > 0 && input._encodedHeight > 0) {
+ width_ = input._encodedWidth;
+ height_ = input._encodedHeight;
+ }
+
+ rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
+ I420Buffer::SetBlack(buffer.get());
+ VideoFrame frame = VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_ms(render_time_ms)
+ .build();
+ frame.set_timestamp(input.Timestamp());
+ frame.set_ntp_time_ms(input.ntp_time_ms_);
+
+ if (decode_delay_ms_ == 0 || !task_queue_) {
+ callback_->Decoded(frame);
+ } else {
+ task_queue_->PostDelayedHighPrecisionTask(
+ [frame, this]() {
+ VideoFrame copy = frame;
+ callback_->Decoded(copy);
+ },
+ TimeDelta::Millis(decode_delay_ms_));
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void FakeDecoder::SetDelayedDecoding(int decode_delay_ms) {
+ RTC_CHECK(task_queue_factory_);
+ if (!task_queue_) {
+ task_queue_ = task_queue_factory_->CreateTaskQueue(
+ "fake_decoder", TaskQueueFactory::Priority::NORMAL);
+ }
+ decode_delay_ms_ = decode_delay_ms;
+}
+
+int32_t FakeDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t FakeDecoder::Release() {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+const char* FakeDecoder::kImplementationName = "fake_decoder";
+VideoDecoder::DecoderInfo FakeDecoder::GetDecoderInfo() const {
+ DecoderInfo info;
+ info.implementation_name = kImplementationName;
+ info.is_hardware_accelerated = true;
+ return info;
+}
+const char* FakeDecoder::ImplementationName() const {
+ return kImplementationName;
+}
+
+int32_t FakeH264Decoder::Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ uint8_t value = 0;
+ for (size_t i = 0; i < input.size(); ++i) {
+ uint8_t kStartCode[] = {0, 0, 0, 1};
+ if (i < input.size() - sizeof(kStartCode) &&
+ !memcmp(&input.data()[i], kStartCode, sizeof(kStartCode))) {
+ i += sizeof(kStartCode) + 1; // Skip start code and NAL header.
+ }
+ if (input.data()[i] != value) {
+ RTC_CHECK_EQ(value, input.data()[i])
+ << "Bitstream mismatch between sender and receiver.";
+ return -1;
+ }
+ ++value;
+ }
+ return FakeDecoder::Decode(input, missing_frames, render_time_ms);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_decoder.h b/third_party/libwebrtc/test/fake_decoder.h
new file mode 100644
index 0000000000..cea92b49be
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_decoder.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_DECODER_H_
+#define TEST_FAKE_DECODER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+namespace test {
+
+class FakeDecoder : public VideoDecoder {
+ public:
+ enum { kDefaultWidth = 320, kDefaultHeight = 180 };
+
+ FakeDecoder();
+ explicit FakeDecoder(TaskQueueFactory* task_queue_factory);
+ virtual ~FakeDecoder() {}
+
+ bool Configure(const Settings& settings) override;
+
+ int32_t Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ DecoderInfo GetDecoderInfo() const override;
+ const char* ImplementationName() const override;
+
+ static const char* kImplementationName;
+
+ void SetDelayedDecoding(int decode_delay_ms);
+
+ private:
+ DecodedImageCallback* callback_;
+ int width_;
+ int height_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
+ TaskQueueFactory* task_queue_factory_;
+ int decode_delay_ms_;
+};
+
+class FakeH264Decoder : public FakeDecoder {
+ public:
+ virtual ~FakeH264Decoder() {}
+
+ int32_t Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_DECODER_H_
diff --git a/third_party/libwebrtc/test/fake_encoded_frame.cc b/third_party/libwebrtc/test/fake_encoded_frame.cc
new file mode 100644
index 0000000000..32fa5d8ccf
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_encoded_frame.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_encoded_frame.h"
+
+#include <memory>
+
+#include "api/video/video_frame_type.h"
+
+namespace webrtc {
+
+void PrintTo(const EncodedFrame& frame,
+ std::ostream* os) /* no-presubmit-check TODO(webrtc:8982) */ {
+ *os << "EncodedFrame with id=" << frame.Id() << " rtp=" << frame.Timestamp()
+ << " size=" << frame.size() << " refs=[";
+ for (size_t ref = 0; ref < frame.num_references; ++ref) {
+ *os << frame.references[ref] << ",";
+ }
+ *os << "]";
+}
+
+namespace test {
+
+int64_t FakeEncodedFrame::ReceivedTime() const {
+ return received_time_;
+}
+
+int64_t FakeEncodedFrame::RenderTime() const {
+ return _renderTimeMs;
+}
+
+void FakeEncodedFrame::SetReceivedTime(int64_t received_time) {
+ received_time_ = received_time;
+}
+
+void FakeEncodedFrame::SetPayloadType(int payload_type) {
+ _payloadType = payload_type;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::Time(uint32_t rtp_timestamp) {
+ rtp_timestamp_ = rtp_timestamp;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::Id(int64_t frame_id) {
+ frame_id_ = frame_id;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::AsLast() {
+ last_spatial_layer_ = true;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::Refs(
+ const std::vector<int64_t>& references) {
+ references_ = references;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::PlayoutDelay(
+ VideoPlayoutDelay playout_delay) {
+ playout_delay_ = playout_delay;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::SpatialLayer(int spatial_layer) {
+ spatial_layer_ = spatial_layer;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::ReceivedTime(Timestamp receive_time) {
+ received_time_ = receive_time;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::Size(size_t size) {
+ size_ = size;
+ return *this;
+}
+
+std::unique_ptr<FakeEncodedFrame> FakeFrameBuilder::Build() {
+ RTC_CHECK_LE(references_.size(), EncodedFrame::kMaxFrameReferences);
+
+ auto frame = std::make_unique<FakeEncodedFrame>();
+ frame->is_last_spatial_layer = last_spatial_layer_;
+ frame->SetEncodedData(EncodedImageBuffer::Create(size_));
+
+ if (rtp_timestamp_)
+ frame->SetTimestamp(*rtp_timestamp_);
+ if (frame_id_)
+ frame->SetId(*frame_id_);
+ if (playout_delay_)
+ frame->SetPlayoutDelay(*playout_delay_);
+ frame->SetFrameType(references_.empty() ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta);
+ for (int64_t ref : references_) {
+ frame->references[frame->num_references] = ref;
+ frame->num_references++;
+ }
+ if (spatial_layer_)
+ frame->SetSpatialIndex(spatial_layer_);
+ if (received_time_)
+ frame->SetReceivedTime(received_time_->ms());
+ if (payload_type_)
+ frame->SetPayloadType(*payload_type_);
+ if (ntp_time_)
+ frame->ntp_time_ms_ = ntp_time_->ms();
+ if (rotation_)
+ frame->rotation_ = *rotation_;
+ if (packet_infos_)
+ frame->SetPacketInfos(*packet_infos_);
+ return frame;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::PayloadType(int payload_type) {
+ payload_type_ = payload_type;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::NtpTime(Timestamp ntp_time) {
+ ntp_time_ = ntp_time;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::Rotation(VideoRotation rotation) {
+ rotation_ = rotation;
+ return *this;
+}
+
+FakeFrameBuilder& FakeFrameBuilder::PacketInfos(RtpPacketInfos packet_infos) {
+ packet_infos_ = packet_infos;
+ return *this;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_encoded_frame.h b/third_party/libwebrtc/test/fake_encoded_frame.h
new file mode 100644
index 0000000000..a5b2aca4a1
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_encoded_frame.h
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_ENCODED_FRAME_H_
+#define TEST_FAKE_ENCODED_FRAME_H_
+
+#include <memory>
+#include <ostream> // no-presubmit-check TODO(webrtc:8982)
+#include <vector>
+
+#include "api/rtp_packet_infos.h"
+#include "api/video/encoded_frame.h"
+#include "api/video/video_rotation.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+// For test printing.
+void PrintTo(const EncodedFrame& frame,
+ std::ostream* os); // no-presubmit-check TODO(webrtc:8982)
+
+namespace test {
+
+class FakeEncodedFrame : public EncodedFrame {
+ public:
+  // Return the explicitly stored receive/render times (no simulated delay).
+ int64_t ReceivedTime() const override;
+ int64_t RenderTime() const override;
+
+ // Setters for protected variables.
+ void SetReceivedTime(int64_t received_time);
+ void SetPayloadType(int payload_type);
+
+ private:
+ int64_t received_time_;
+};
+
+MATCHER_P(WithId, id, "") {
+ return ::testing::Matches(::testing::Eq(id))(arg.Id());
+}
+
+MATCHER_P(FrameWithSize, id, "") {
+ return ::testing::Matches(::testing::Eq(id))(arg.size());
+}
+
+MATCHER_P(RtpTimestamp, ts, "") {
+ return ts == arg.Timestamp();
+}
+
+class FakeFrameBuilder {
+ public:
+ FakeFrameBuilder& Time(uint32_t rtp_timestamp);
+ FakeFrameBuilder& Id(int64_t frame_id);
+ FakeFrameBuilder& AsLast();
+ FakeFrameBuilder& Refs(const std::vector<int64_t>& references);
+ FakeFrameBuilder& PlayoutDelay(VideoPlayoutDelay playout_delay);
+ FakeFrameBuilder& SpatialLayer(int spatial_layer);
+ FakeFrameBuilder& ReceivedTime(Timestamp receive_time);
+ FakeFrameBuilder& Size(size_t size);
+ FakeFrameBuilder& PayloadType(int payload_type);
+ FakeFrameBuilder& NtpTime(Timestamp ntp_time);
+ FakeFrameBuilder& Rotation(VideoRotation rotation);
+ FakeFrameBuilder& PacketInfos(RtpPacketInfos packet_infos);
+ std::unique_ptr<FakeEncodedFrame> Build();
+
+ private:
+ absl::optional<uint32_t> rtp_timestamp_;
+ absl::optional<int64_t> frame_id_;
+ absl::optional<VideoPlayoutDelay> playout_delay_;
+ absl::optional<int> spatial_layer_;
+ absl::optional<Timestamp> received_time_;
+ absl::optional<int> payload_type_;
+ absl::optional<Timestamp> ntp_time_;
+ absl::optional<VideoRotation> rotation_;
+ absl::optional<RtpPacketInfos> packet_infos_;
+ std::vector<int64_t> references_;
+ bool last_spatial_layer_ = false;
+ size_t size_ = 10;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_ENCODED_FRAME_H_
diff --git a/third_party/libwebrtc/test/fake_encoder.cc b/third_party/libwebrtc/test/fake_encoder.cc
new file mode 100644
index 0000000000..bfc72c123d
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_encoder.cc
@@ -0,0 +1,441 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_encoder.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <cstdint>
+#include <memory>
+#include <string>
+
+#include "api/video/video_content_type.h"
+#include "modules/video_coding/codecs/h264/include/h264_globals.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/sleep.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+const int kKeyframeSizeFactor = 5;
+
+// Inverse of proportion of frames assigned to each temporal layer for all
+// possible temporal layers numbers.
+const int kTemporalLayerRateFactor[4][4] = {
+ {1, 0, 0, 0}, // 1/1
+ {2, 2, 0, 0}, // 1/2 + 1/2
+ {4, 4, 2, 0}, // 1/4 + 1/4 + 1/2
+ {8, 8, 4, 2}, // 1/8 + 1/8 + 1/4 + 1/2
+};
+
+void WriteCounter(unsigned char* payload, uint32_t counter) {
+ payload[0] = (counter & 0x00FF);
+ payload[1] = (counter & 0xFF00) >> 8;
+ payload[2] = (counter & 0xFF0000) >> 16;
+ payload[3] = (counter & 0xFF000000) >> 24;
+}
+
+} // namespace
+
+FakeEncoder::FakeEncoder(Clock* clock)
+ : clock_(clock),
+ num_initializations_(0),
+ callback_(nullptr),
+ max_target_bitrate_kbps_(-1),
+ pending_keyframe_(true),
+ counter_(0),
+ debt_bytes_(0) {
+ for (bool& used : used_layers_) {
+ used = false;
+ }
+}
+
+void FakeEncoder::SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) {
+ // Ignored.
+}
+
+void FakeEncoder::SetMaxBitrate(int max_kbps) {
+ RTC_DCHECK_GE(max_kbps, -1); // max_kbps == -1 disables it.
+ MutexLock lock(&mutex_);
+ max_target_bitrate_kbps_ = max_kbps;
+ SetRatesLocked(current_rate_settings_);
+}
+
+void FakeEncoder::SetQp(int qp) {
+ MutexLock lock(&mutex_);
+ qp_ = qp;
+}
+
+int32_t FakeEncoder::InitEncode(const VideoCodec* config,
+ const Settings& settings) {
+ MutexLock lock(&mutex_);
+ config_ = *config;
+ ++num_initializations_;
+ current_rate_settings_.bitrate.SetBitrate(0, 0, config_.startBitrate * 1000);
+ current_rate_settings_.framerate_fps = config_.maxFramerate;
+ pending_keyframe_ = true;
+ last_frame_info_ = FrameInfo();
+ return 0;
+}
+
+int32_t FakeEncoder::Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) {
+ unsigned char max_framerate;
+ unsigned char num_simulcast_streams;
+ SimulcastStream simulcast_streams[kMaxSimulcastStreams];
+ EncodedImageCallback* callback;
+ RateControlParameters rates;
+ bool keyframe;
+ uint32_t counter;
+ absl::optional<int> qp;
+ {
+ MutexLock lock(&mutex_);
+ max_framerate = config_.maxFramerate;
+ num_simulcast_streams = config_.numberOfSimulcastStreams;
+ for (int i = 0; i < num_simulcast_streams; ++i) {
+ simulcast_streams[i] = config_.simulcastStream[i];
+ }
+ callback = callback_;
+ rates = current_rate_settings_;
+ if (rates.framerate_fps <= 0.0) {
+ rates.framerate_fps = max_framerate;
+ }
+ keyframe = pending_keyframe_;
+ pending_keyframe_ = false;
+ counter = counter_++;
+ qp = qp_;
+ }
+
+ FrameInfo frame_info =
+ NextFrame(frame_types, keyframe, num_simulcast_streams, rates.bitrate,
+ simulcast_streams, static_cast<int>(rates.framerate_fps + 0.5));
+ for (uint8_t i = 0; i < frame_info.layers.size(); ++i) {
+ constexpr int kMinPayLoadLength = 14;
+ if (frame_info.layers[i].size < kMinPayLoadLength) {
+ // Drop this temporal layer.
+ continue;
+ }
+
+ auto buffer = EncodedImageBuffer::Create(frame_info.layers[i].size);
+    // Fill the buffer with arbitrary data. Write something to make Asan happy.
+ memset(buffer->data(), 9, frame_info.layers[i].size);
+ // Write a counter to the image to make each frame unique.
+ WriteCounter(buffer->data() + frame_info.layers[i].size - 4, counter);
+
+ EncodedImage encoded;
+ encoded.SetEncodedData(buffer);
+
+ encoded.SetTimestamp(input_image.timestamp());
+ encoded._frameType = frame_info.keyframe ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+ encoded._encodedWidth = simulcast_streams[i].width;
+ encoded._encodedHeight = simulcast_streams[i].height;
+ if (qp)
+ encoded.qp_ = *qp;
+ encoded.SetSpatialIndex(i);
+ CodecSpecificInfo codec_specific = EncodeHook(encoded, buffer);
+
+ if (callback->OnEncodedImage(encoded, &codec_specific).error !=
+ EncodedImageCallback::Result::OK) {
+ return -1;
+ }
+ }
+ return 0;
+}
+
+CodecSpecificInfo FakeEncoder::EncodeHook(
+ EncodedImage& encoded_image,
+ rtc::scoped_refptr<EncodedImageBuffer> buffer) {
+ CodecSpecificInfo codec_specific;
+ codec_specific.codecType = kVideoCodecGeneric;
+ return codec_specific;
+}
+
+FakeEncoder::FrameInfo FakeEncoder::NextFrame(
+ const std::vector<VideoFrameType>* frame_types,
+ bool keyframe,
+ uint8_t num_simulcast_streams,
+ const VideoBitrateAllocation& target_bitrate,
+ SimulcastStream simulcast_streams[kMaxSimulcastStreams],
+ int framerate) {
+ FrameInfo frame_info;
+ frame_info.keyframe = keyframe;
+
+ if (frame_types) {
+ for (VideoFrameType frame_type : *frame_types) {
+ if (frame_type == VideoFrameType::kVideoFrameKey) {
+ frame_info.keyframe = true;
+ break;
+ }
+ }
+ }
+
+ MutexLock lock(&mutex_);
+ for (uint8_t i = 0; i < num_simulcast_streams; ++i) {
+ if (target_bitrate.GetBitrate(i, 0) > 0) {
+ int temporal_id = last_frame_info_.layers.size() > i
+ ? ++last_frame_info_.layers[i].temporal_id %
+ simulcast_streams[i].numberOfTemporalLayers
+ : 0;
+ frame_info.layers.emplace_back(0, temporal_id);
+ }
+ }
+
+ if (last_frame_info_.layers.size() < frame_info.layers.size()) {
+ // A new keyframe is needed since a new layer will be added.
+ frame_info.keyframe = true;
+ }
+
+ for (uint8_t i = 0; i < frame_info.layers.size(); ++i) {
+ FrameInfo::SpatialLayer& layer_info = frame_info.layers[i];
+ if (frame_info.keyframe) {
+ layer_info.temporal_id = 0;
+ size_t avg_frame_size =
+ (target_bitrate.GetBitrate(i, 0) + 7) *
+ kTemporalLayerRateFactor[frame_info.layers.size() - 1][i] /
+ (8 * framerate);
+
+ // The first frame is a key frame and should be larger.
+ // Store the overshoot bytes and distribute them over the coming frames,
+ // so that we on average meet the bitrate target.
+ debt_bytes_ += (kKeyframeSizeFactor - 1) * avg_frame_size;
+ layer_info.size = kKeyframeSizeFactor * avg_frame_size;
+ } else {
+ size_t avg_frame_size =
+ (target_bitrate.GetBitrate(i, layer_info.temporal_id) + 7) *
+ kTemporalLayerRateFactor[frame_info.layers.size() - 1][i] /
+ (8 * framerate);
+ layer_info.size = avg_frame_size;
+ if (debt_bytes_ > 0) {
+ // Pay at most half of the frame size for old debts.
+ size_t payment_size = std::min(avg_frame_size / 2, debt_bytes_);
+ debt_bytes_ -= payment_size;
+ layer_info.size -= payment_size;
+ }
+ }
+ }
+ last_frame_info_ = frame_info;
+ return frame_info;
+}
+
+int32_t FakeEncoder::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ MutexLock lock(&mutex_);
+ callback_ = callback;
+ return 0;
+}
+
+int32_t FakeEncoder::Release() {
+ return 0;
+}
+
+void FakeEncoder::SetRates(const RateControlParameters& parameters) {
+ MutexLock lock(&mutex_);
+ SetRatesLocked(parameters);
+}
+
+void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) {
+ current_rate_settings_ = parameters;
+ int allocated_bitrate_kbps = parameters.bitrate.get_sum_kbps();
+
+ // Scale bitrate allocation to not exceed the given max target bitrate.
+ if (max_target_bitrate_kbps_ > 0 &&
+ allocated_bitrate_kbps > max_target_bitrate_kbps_) {
+ for (uint8_t spatial_idx = 0; spatial_idx < kMaxSpatialLayers;
+ ++spatial_idx) {
+ for (uint8_t temporal_idx = 0; temporal_idx < kMaxTemporalStreams;
+ ++temporal_idx) {
+ if (current_rate_settings_.bitrate.HasBitrate(spatial_idx,
+ temporal_idx)) {
+ uint32_t bitrate = current_rate_settings_.bitrate.GetBitrate(
+ spatial_idx, temporal_idx);
+ bitrate = static_cast<uint32_t>(
+ (bitrate * int64_t{max_target_bitrate_kbps_}) /
+ allocated_bitrate_kbps);
+ current_rate_settings_.bitrate.SetBitrate(spatial_idx, temporal_idx,
+ bitrate);
+ }
+ }
+ }
+ }
+}
+
+const char* FakeEncoder::kImplementationName = "fake_encoder";
+VideoEncoder::EncoderInfo FakeEncoder::GetEncoderInfo() const {
+ EncoderInfo info;
+ info.implementation_name = kImplementationName;
+ info.is_hardware_accelerated = true;
+ MutexLock lock(&mutex_);
+ for (int sid = 0; sid < config_.numberOfSimulcastStreams; ++sid) {
+ int number_of_temporal_layers =
+ config_.simulcastStream[sid].numberOfTemporalLayers;
+ info.fps_allocation[sid].clear();
+ for (int tid = 0; tid < number_of_temporal_layers; ++tid) {
+ // {1/4, 1/2, 1} allocation for num layers = 3.
+ info.fps_allocation[sid].push_back(255 /
+ (number_of_temporal_layers - tid));
+ }
+ }
+ return info;
+}
+
+int FakeEncoder::GetConfiguredInputFramerate() const {
+ MutexLock lock(&mutex_);
+ return static_cast<int>(current_rate_settings_.framerate_fps + 0.5);
+}
+
+int FakeEncoder::GetNumInitializations() const {
+ MutexLock lock(&mutex_);
+ return num_initializations_;
+}
+
+const VideoCodec& FakeEncoder::config() const {
+ MutexLock lock(&mutex_);
+ return config_;
+}
+
+FakeH264Encoder::FakeH264Encoder(Clock* clock)
+ : FakeEncoder(clock), idr_counter_(0) {}
+
+CodecSpecificInfo FakeH264Encoder::EncodeHook(
+ EncodedImage& encoded_image,
+ rtc::scoped_refptr<EncodedImageBuffer> buffer) {
+ static constexpr std::array<uint8_t, 3> kStartCode = {0, 0, 1};
+ const size_t kSpsSize = 8;
+ const size_t kPpsSize = 11;
+ const int kIdrFrequency = 10;
+ int current_idr_counter;
+ {
+ MutexLock lock(&local_mutex_);
+ current_idr_counter = idr_counter_;
+ ++idr_counter_;
+ }
+ for (size_t i = 0; i < encoded_image.size(); ++i) {
+ buffer->data()[i] = static_cast<uint8_t>(i);
+ }
+
+ if (current_idr_counter % kIdrFrequency == 0 &&
+ encoded_image.size() > kSpsSize + kPpsSize + 1 + 3 * kStartCode.size()) {
+ const size_t kSpsNalHeader = 0x67;
+ const size_t kPpsNalHeader = 0x68;
+ const size_t kIdrNalHeader = 0x65;
+ uint8_t* data = buffer->data();
+ memcpy(data, kStartCode.data(), kStartCode.size());
+ data += kStartCode.size();
+ data[0] = kSpsNalHeader;
+ data += kSpsSize;
+
+ memcpy(data, kStartCode.data(), kStartCode.size());
+ data += kStartCode.size();
+ data[0] = kPpsNalHeader;
+ data += kPpsSize;
+
+ memcpy(data, kStartCode.data(), kStartCode.size());
+ data += kStartCode.size();
+ data[0] = kIdrNalHeader;
+ } else {
+ memcpy(buffer->data(), kStartCode.data(), kStartCode.size());
+ const size_t kNalHeader = 0x41;
+ buffer->data()[kStartCode.size()] = kNalHeader;
+ }
+
+ CodecSpecificInfo codec_specific;
+ codec_specific.codecType = kVideoCodecH264;
+ codec_specific.codecSpecific.H264.packetization_mode =
+ H264PacketizationMode::NonInterleaved;
+ return codec_specific;
+}
+
+DelayedEncoder::DelayedEncoder(Clock* clock, int delay_ms)
+ : test::FakeEncoder(clock), delay_ms_(delay_ms) {
+ // The encoder could be created on a different thread than
+ // it is being used on.
+ sequence_checker_.Detach();
+}
+
+void DelayedEncoder::SetDelay(int delay_ms) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ delay_ms_ = delay_ms;
+}
+
+int32_t DelayedEncoder::Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ SleepMs(delay_ms_);
+
+ return FakeEncoder::Encode(input_image, frame_types);
+}
+
+MultithreadedFakeH264Encoder::MultithreadedFakeH264Encoder(
+ Clock* clock,
+ TaskQueueFactory* task_queue_factory)
+ : test::FakeH264Encoder(clock),
+ task_queue_factory_(task_queue_factory),
+ current_queue_(0),
+ queue1_(nullptr),
+ queue2_(nullptr) {
+ // The encoder could be created on a different thread than
+ // it is being used on.
+ sequence_checker_.Detach();
+}
+
+int32_t MultithreadedFakeH264Encoder::InitEncode(const VideoCodec* config,
+ const Settings& settings) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ queue1_ = task_queue_factory_->CreateTaskQueue(
+ "Queue 1", TaskQueueFactory::Priority::NORMAL);
+ queue2_ = task_queue_factory_->CreateTaskQueue(
+ "Queue 2", TaskQueueFactory::Priority::NORMAL);
+
+ return FakeH264Encoder::InitEncode(config, settings);
+}
+
+int32_t MultithreadedFakeH264Encoder::Encode(
+ const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ TaskQueueBase* queue =
+ (current_queue_++ % 2 == 0) ? queue1_.get() : queue2_.get();
+
+ if (!queue) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ queue->PostTask([this, input_image, frame_types = *frame_types] {
+ EncodeCallback(input_image, &frame_types);
+ });
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MultithreadedFakeH264Encoder::EncodeCallback(
+ const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) {
+ return FakeH264Encoder::Encode(input_image, frame_types);
+}
+
+int32_t MultithreadedFakeH264Encoder::Release() {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ queue1_.reset();
+ queue2_.reset();
+
+ return FakeH264Encoder::Release();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_encoder.h b/third_party/libwebrtc/test/fake_encoder.h
new file mode 100644
index 0000000000..65e03155d7
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_encoder.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_ENCODER_H_
+#define TEST_FAKE_ENCODER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <vector>
+
+#include "api/fec_controller_override.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace test {
+
+class FakeEncoder : public VideoEncoder {
+ public:
+ explicit FakeEncoder(Clock* clock);
+ virtual ~FakeEncoder() = default;
+
+ // Sets max bitrate. Not thread-safe, call before registering the encoder.
+ void SetMaxBitrate(int max_kbps) RTC_LOCKS_EXCLUDED(mutex_);
+ void SetQp(int qp) RTC_LOCKS_EXCLUDED(mutex_);
+
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override;
+
+ int32_t InitEncode(const VideoCodec* config, const Settings& settings)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ int32_t Release() override;
+ void SetRates(const RateControlParameters& parameters)
+ RTC_LOCKS_EXCLUDED(mutex_) override;
+ EncoderInfo GetEncoderInfo() const override;
+
+ int GetConfiguredInputFramerate() const RTC_LOCKS_EXCLUDED(mutex_);
+ int GetNumInitializations() const RTC_LOCKS_EXCLUDED(mutex_);
+ const VideoCodec& config() const RTC_LOCKS_EXCLUDED(mutex_);
+
+ static const char* kImplementationName;
+
+ protected:
+ struct FrameInfo {
+ bool keyframe;
+ struct SpatialLayer {
+ SpatialLayer() = default;
+ SpatialLayer(int size, int temporal_id)
+ : size(size), temporal_id(temporal_id) {}
+ // Size of a current frame in the layer.
+ int size = 0;
+ // Temporal index of a current frame in the layer.
+ int temporal_id = 0;
+ };
+ std::vector<SpatialLayer> layers;
+ };
+
+ FrameInfo NextFrame(const std::vector<VideoFrameType>* frame_types,
+ bool keyframe,
+ uint8_t num_simulcast_streams,
+ const VideoBitrateAllocation& target_bitrate,
+ SimulcastStream simulcast_streams[kMaxSimulcastStreams],
+ int framerate) RTC_LOCKS_EXCLUDED(mutex_);
+
+ // Called before the frame is passed to callback_->OnEncodedImage, to let
+ // subclasses fill out CodecSpecificInfo, possibly modify `encoded_image` or
+ // `buffer`.
+ virtual CodecSpecificInfo EncodeHook(
+ EncodedImage& encoded_image,
+ rtc::scoped_refptr<EncodedImageBuffer> buffer);
+
+ void SetRatesLocked(const RateControlParameters& parameters)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ FrameInfo last_frame_info_ RTC_GUARDED_BY(mutex_);
+ Clock* const clock_;
+
+ VideoCodec config_ RTC_GUARDED_BY(mutex_);
+ int num_initializations_ RTC_GUARDED_BY(mutex_);
+ EncodedImageCallback* callback_ RTC_GUARDED_BY(mutex_);
+ RateControlParameters current_rate_settings_ RTC_GUARDED_BY(mutex_);
+ int max_target_bitrate_kbps_ RTC_GUARDED_BY(mutex_);
+ bool pending_keyframe_ RTC_GUARDED_BY(mutex_);
+ uint32_t counter_ RTC_GUARDED_BY(mutex_);
+ mutable Mutex mutex_;
+ bool used_layers_[kMaxSimulcastStreams];
+ absl::optional<int> qp_ RTC_GUARDED_BY(mutex_);
+
+  // Current byte debt to be paid over a number of frames.
+ // The debt is acquired by keyframes overshooting the bitrate target.
+ size_t debt_bytes_;
+};
+
+class FakeH264Encoder : public FakeEncoder {
+ public:
+ explicit FakeH264Encoder(Clock* clock);
+ virtual ~FakeH264Encoder() = default;
+
+ private:
+ CodecSpecificInfo EncodeHook(
+ EncodedImage& encoded_image,
+ rtc::scoped_refptr<EncodedImageBuffer> buffer) override;
+
+ int idr_counter_ RTC_GUARDED_BY(local_mutex_);
+ Mutex local_mutex_;
+};
+
+class DelayedEncoder : public test::FakeEncoder {
+ public:
+ DelayedEncoder(Clock* clock, int delay_ms);
+ virtual ~DelayedEncoder() = default;
+
+ void SetDelay(int delay_ms);
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ private:
+ int delay_ms_ RTC_GUARDED_BY(sequence_checker_);
+ SequenceChecker sequence_checker_;
+};
+
+// This class implements a multi-threaded fake encoder by posting
+// FakeH264Encoder::Encode(.) tasks to `queue1_` and `queue2_`, in an
+// alternating fashion. The class itself does not need to be thread safe,
+// as it is called from the task queue in VideoStreamEncoder.
+class MultithreadedFakeH264Encoder : public test::FakeH264Encoder {
+ public:
+ MultithreadedFakeH264Encoder(Clock* clock,
+ TaskQueueFactory* task_queue_factory);
+ virtual ~MultithreadedFakeH264Encoder() = default;
+
+ int32_t InitEncode(const VideoCodec* config,
+ const Settings& settings) override;
+
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ int32_t EncodeCallback(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types);
+
+ int32_t Release() override;
+
+ protected:
+ TaskQueueFactory* const task_queue_factory_;
+ int current_queue_ RTC_GUARDED_BY(sequence_checker_);
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> queue1_
+ RTC_GUARDED_BY(sequence_checker_);
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> queue2_
+ RTC_GUARDED_BY(sequence_checker_);
+ SequenceChecker sequence_checker_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_ENCODER_H_
diff --git a/third_party/libwebrtc/test/fake_texture_frame.cc b/third_party/libwebrtc/test/fake_texture_frame.cc
new file mode 100644
index 0000000000..9c17e4c1c0
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_texture_frame.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_texture_frame.h"
+
+#include "api/video/i420_buffer.h"
+
+namespace webrtc {
+namespace test {
+
+VideoFrame FakeNativeBuffer::CreateFrame(int width,
+ int height,
+ uint32_t timestamp,
+ int64_t render_time_ms,
+ VideoRotation rotation) {
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(
+ rtc::make_ref_counted<FakeNativeBuffer>(width, height))
+ .set_timestamp_rtp(timestamp)
+ .set_timestamp_ms(render_time_ms)
+ .set_rotation(rotation)
+ .build();
+}
+
+VideoFrameBuffer::Type FakeNativeBuffer::type() const {
+ return Type::kNative;
+}
+
+int FakeNativeBuffer::width() const {
+ return width_;
+}
+
+int FakeNativeBuffer::height() const {
+ return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> FakeNativeBuffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
+ I420Buffer::SetBlack(buffer.get());
+ return buffer;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_texture_frame.h b/third_party/libwebrtc/test/fake_texture_frame.h
new file mode 100644
index 0000000000..1b25112e01
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_texture_frame.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_FAKE_TEXTURE_FRAME_H_
+#define TEST_FAKE_TEXTURE_FRAME_H_
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+class FakeNativeBuffer : public VideoFrameBuffer {
+ public:
+ static VideoFrame CreateFrame(int width,
+ int height,
+ uint32_t timestamp,
+ int64_t render_time_ms,
+ VideoRotation rotation);
+
+ FakeNativeBuffer(int width, int height) : width_(width), height_(height) {}
+
+ Type type() const override;
+ int width() const override;
+ int height() const override;
+
+ private:
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ const int width_;
+ const int height_;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // TEST_FAKE_TEXTURE_FRAME_H_
diff --git a/third_party/libwebrtc/test/fake_videorenderer.h b/third_party/libwebrtc/test/fake_videorenderer.h
new file mode 100644
index 0000000000..a2c953ff77
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_videorenderer.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_VIDEORENDERER_H_
+#define TEST_FAKE_VIDEORENDERER_H_
+
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+
+namespace webrtc {
+namespace test {
+
+class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ void OnFrame(const webrtc::VideoFrame& frame) override {}
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_VIDEORENDERER_H_
diff --git a/third_party/libwebrtc/test/fake_vp8_decoder.cc b/third_party/libwebrtc/test/fake_vp8_decoder.cc
new file mode 100644
index 0000000000..4c2f55a668
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_vp8_decoder.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_vp8_decoder.h"
+
+#include <stddef.h>
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+// Read width and height from the payload of the frame, if it is a key frame,
+// the same way as the real VP8 decoder does.
+// FakeEncoder writes width, height and frame type.
+void ParseFakeVp8(const unsigned char* data, int* width, int* height) {
+ bool key_frame = data[0] == 0;
+ if (key_frame) {
+ *width = ((data[7] << 8) + data[6]) & 0x3FFF;
+ *height = ((data[9] << 8) + data[8]) & 0x3FFF;
+ }
+}
+} // namespace
+
+FakeVp8Decoder::FakeVp8Decoder() : callback_(nullptr), width_(0), height_(0) {}
+
+bool FakeVp8Decoder::Configure(const Settings& settings) {
+ return true;
+}
+
+int32_t FakeVp8Decoder::Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ constexpr size_t kMinPayLoadHeaderLength = 10;
+ if (input.size() < kMinPayLoadHeaderLength) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ParseFakeVp8(input.data(), &width_, &height_);
+
+ VideoFrame frame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(width_, height_))
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_ms(render_time_ms)
+ .build();
+ frame.set_timestamp(input.Timestamp());
+ frame.set_ntp_time_ms(input.ntp_time_ms_);
+
+ callback_->Decoded(frame, /*decode_time_ms=*/absl::nullopt,
+ /*qp=*/absl::nullopt);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t FakeVp8Decoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t FakeVp8Decoder::Release() {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+const char* FakeVp8Decoder::kImplementationName = "fake_vp8_decoder";
+VideoDecoder::DecoderInfo FakeVp8Decoder::GetDecoderInfo() const {
+ DecoderInfo info;
+ info.implementation_name = kImplementationName;
+ info.is_hardware_accelerated = false;
+ return info;
+}
+
+const char* FakeVp8Decoder::ImplementationName() const {
+ return kImplementationName;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_vp8_decoder.h b/third_party/libwebrtc/test/fake_vp8_decoder.h
new file mode 100644
index 0000000000..391ee374d4
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_vp8_decoder.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_VP8_DECODER_H_
+#define TEST_FAKE_VP8_DECODER_H_
+
+#include <stdint.h>
+
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+namespace test {
+
+class FakeVp8Decoder : public VideoDecoder {
+ public:
+ FakeVp8Decoder();
+ ~FakeVp8Decoder() override {}
+
+ bool Configure(const Settings& settings) override;
+
+ int32_t Decode(const EncodedImage& input,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ DecoderInfo GetDecoderInfo() const override;
+ const char* ImplementationName() const override;
+ static const char* kImplementationName;
+
+ private:
+ DecodedImageCallback* callback_;
+ int width_;
+ int height_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_VP8_DECODER_H_
diff --git a/third_party/libwebrtc/test/fake_vp8_encoder.cc b/third_party/libwebrtc/test/fake_vp8_encoder.cc
new file mode 100644
index 0000000000..625d7a6473
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_vp8_encoder.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_vp8_encoder.h"
+
+#include <algorithm>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/vp8_temporal_layers.h"
+#include "api/video_codecs/vp8_temporal_layers_factory.h"
+#include "modules/video_coding/codecs/interface/common_constants.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/utility/simulcast_utility.h"
+
+namespace {
+
+// Write width and height to the payload the same way as the real encoder does.
+// It requires that `payload` has a size of at least kMinPayLoadHeaderLength.
+void WriteFakeVp8(unsigned char* payload,
+ int width,
+ int height,
+ bool key_frame) {
+ payload[0] = key_frame ? 0 : 0x01;
+
+ if (key_frame) {
+ payload[9] = (height & 0x3F00) >> 8;
+ payload[8] = (height & 0x00FF);
+
+ payload[7] = (width & 0x3F00) >> 8;
+ payload[6] = (width & 0x00FF);
+ }
+}
+} // namespace
+
+namespace webrtc {
+
+namespace test {
+
+FakeVp8Encoder::FakeVp8Encoder(Clock* clock) : FakeEncoder(clock) {
+ sequence_checker_.Detach();
+}
+
+int32_t FakeVp8Encoder::InitEncode(const VideoCodec* config,
+ const Settings& settings) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto result = FakeEncoder::InitEncode(config, settings);
+ if (result != WEBRTC_VIDEO_CODEC_OK) {
+ return result;
+ }
+
+ Vp8TemporalLayersFactory factory;
+ frame_buffer_controller_ =
+ factory.Create(*config, settings, &fec_controller_override_);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t FakeVp8Encoder::Release() {
+ auto result = FakeEncoder::Release();
+ sequence_checker_.Detach();
+ return result;
+}
+
+CodecSpecificInfo FakeVp8Encoder::PopulateCodecSpecific(
+ size_t size_bytes,
+ VideoFrameType frame_type,
+ int stream_idx,
+ uint32_t timestamp) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ CodecSpecificInfo codec_specific;
+ codec_specific.codecType = kVideoCodecVP8;
+ codec_specific.codecSpecific.VP8.keyIdx = kNoKeyIdx;
+ codec_specific.codecSpecific.VP8.nonReference = false;
+ if (size_bytes > 0) {
+ frame_buffer_controller_->OnEncodeDone(
+ stream_idx, timestamp, size_bytes,
+ frame_type == VideoFrameType::kVideoFrameKey, -1, &codec_specific);
+ } else {
+ frame_buffer_controller_->OnFrameDropped(stream_idx, timestamp);
+ }
+ return codec_specific;
+}
+
+CodecSpecificInfo FakeVp8Encoder::EncodeHook(
+ EncodedImage& encoded_image,
+ rtc::scoped_refptr<EncodedImageBuffer> buffer) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ uint8_t stream_idx = encoded_image.SpatialIndex().value_or(0);
+ frame_buffer_controller_->NextFrameConfig(stream_idx,
+ encoded_image.Timestamp());
+ CodecSpecificInfo codec_specific =
+ PopulateCodecSpecific(encoded_image.size(), encoded_image._frameType,
+ stream_idx, encoded_image.Timestamp());
+
+ // Write width and height to the payload the same way as the real encoder
+ // does.
+ WriteFakeVp8(buffer->data(), encoded_image._encodedWidth,
+ encoded_image._encodedHeight,
+ encoded_image._frameType == VideoFrameType::kVideoFrameKey);
+ return codec_specific;
+}
+
+VideoEncoder::EncoderInfo FakeVp8Encoder::GetEncoderInfo() const {
+ EncoderInfo info;
+ info.implementation_name = "FakeVp8Encoder";
+ MutexLock lock(&mutex_);
+ for (int sid = 0; sid < config_.numberOfSimulcastStreams; ++sid) {
+ int number_of_temporal_layers =
+ config_.simulcastStream[sid].numberOfTemporalLayers;
+ info.fps_allocation[sid].clear();
+ for (int tid = 0; tid < number_of_temporal_layers; ++tid) {
+ // {1/4, 1/2, 1} allocation for num layers = 3.
+ info.fps_allocation[sid].push_back(255 /
+ (number_of_temporal_layers - tid));
+ }
+ }
+ return info;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fake_vp8_encoder.h b/third_party/libwebrtc/test/fake_vp8_encoder.h
new file mode 100644
index 0000000000..6aaf547379
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_vp8_encoder.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FAKE_VP8_ENCODER_H_
+#define TEST_FAKE_VP8_ENCODER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/fec_controller_override.h"
+#include "api/sequence_checker.h"
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/vp8_frame_buffer_controller.h"
+#include "api/video_codecs/vp8_temporal_layers.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+#include "test/fake_encoder.h"
+
+namespace webrtc {
+namespace test {
+
+// Fake VP8 encoder for tests: extends FakeEncoder with a minimal valid VP8
+// payload header and real temporal-layer bookkeeping via a
+// Vp8FrameBufferController, so simulcast/temporal-layer logic can be
+// exercised without linking libvpx.
+class FakeVp8Encoder : public FakeEncoder {
+ public:
+  explicit FakeVp8Encoder(Clock* clock);
+  virtual ~FakeVp8Encoder() = default;
+
+  int32_t InitEncode(const VideoCodec* config,
+                     const Settings& settings) override;
+
+  int32_t Release() override;
+
+  EncoderInfo GetEncoderInfo() const override;
+
+ private:
+  // Builds VP8 CodecSpecificInfo for one frame and notifies the frame buffer
+  // controller of the encode result (or of a drop when size_bytes == 0).
+  CodecSpecificInfo PopulateCodecSpecific(size_t size_bytes,
+                                          VideoFrameType frame_type,
+                                          int stream_idx,
+                                          uint32_t timestamp);
+
+  // FakeEncoder callback; stamps a fake VP8 header into the payload.
+  CodecSpecificInfo EncodeHook(
+      EncodedImage& encoded_image,
+      rtc::scoped_refptr<EncodedImageBuffer> buffer) override;
+
+  SequenceChecker sequence_checker_;
+
+  // No-op FEC override handed to the frame buffer controller.
+  class FakeFecControllerOverride : public FecControllerOverride {
+   public:
+    ~FakeFecControllerOverride() override = default;
+
+    void SetFecAllowed(bool fec_allowed) override {}
+  };
+
+  FakeFecControllerOverride fec_controller_override_
+      RTC_GUARDED_BY(sequence_checker_);
+
+  std::unique_ptr<Vp8FrameBufferController> frame_buffer_controller_
+      RTC_GUARDED_BY(sequence_checker_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FAKE_VP8_ENCODER_H_
diff --git a/third_party/libwebrtc/test/fake_vp8_encoder_unittest.cc b/third_party/libwebrtc/test/fake_vp8_encoder_unittest.cc
new file mode 100644
index 0000000000..e79e8e421b
--- /dev/null
+++ b/third_party/libwebrtc/test/fake_vp8_encoder_unittest.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fake_vp8_encoder.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/test/create_simulcast_test_fixture.h"
+#include "api/test/simulcast_test_fixture.h"
+#include "api/test/video/function_video_decoder_factory.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "modules/video_coding/utility/simulcast_test_fixture_impl.h"
+#include "test/fake_vp8_decoder.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+// Builds a simulcast test fixture whose encoder/decoder factories produce
+// FakeVp8Encoder / FakeVp8Decoder instances, negotiated as "VP8".
+std::unique_ptr<SimulcastTestFixture> CreateSpecificSimulcastTestFixture() {
+  std::unique_ptr<VideoEncoderFactory> encoder_factory =
+      std::make_unique<FunctionVideoEncoderFactory>([]() {
+        return std::make_unique<FakeVp8Encoder>(Clock::GetRealTimeClock());
+      });
+  std::unique_ptr<VideoDecoderFactory> decoder_factory =
+      std::make_unique<FunctionVideoDecoderFactory>(
+          []() { return std::make_unique<FakeVp8Decoder>(); });
+  return CreateSimulcastTestFixture(std::move(encoder_factory),
+                                    std::move(decoder_factory),
+                                    SdpVideoFormat("VP8"));
+}
+} // namespace
+
+// Each test builds a fresh fixture wired to the fake VP8 codec pair and
+// delegates to the shared SimulcastTestFixture scenario of the same name.
+TEST(TestFakeVp8Codec, TestKeyFrameRequestsOnAllStreams) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestKeyFrameRequestsOnAllStreams();
+}
+
+TEST(TestFakeVp8Codec, TestPaddingAllStreams) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestPaddingAllStreams();
+}
+
+TEST(TestFakeVp8Codec, TestPaddingTwoStreams) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestPaddingTwoStreams();
+}
+
+TEST(TestFakeVp8Codec, TestPaddingTwoStreamsOneMaxedOut) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestPaddingTwoStreamsOneMaxedOut();
+}
+
+TEST(TestFakeVp8Codec, TestPaddingOneStream) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestPaddingOneStream();
+}
+
+TEST(TestFakeVp8Codec, TestPaddingOneStreamTwoMaxedOut) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestPaddingOneStreamTwoMaxedOut();
+}
+
+TEST(TestFakeVp8Codec, TestSendAllStreams) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestSendAllStreams();
+}
+
+TEST(TestFakeVp8Codec, TestDisablingStreams) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestDisablingStreams();
+}
+
+TEST(TestFakeVp8Codec, TestSwitchingToOneStream) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestSwitchingToOneStream();
+}
+
+TEST(TestFakeVp8Codec, TestSwitchingToOneOddStream) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestSwitchingToOneOddStream();
+}
+
+TEST(TestFakeVp8Codec, TestSwitchingToOneSmallStream) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestSwitchingToOneSmallStream();
+}
+
+TEST(TestFakeVp8Codec, TestSpatioTemporalLayers333PatternEncoder) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestSpatioTemporalLayers333PatternEncoder();
+}
+
+TEST(TestFakeVp8Codec, TestDecodeWidthHeightSet) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestDecodeWidthHeightSet();
+}
+
+TEST(TestFakeVp8Codec,
+     TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation) {
+  auto fixture = CreateSpecificSimulcastTestFixture();
+  fixture->TestEncoderInfoForDefaultTemporalLayerProfileHasFpsAllocation();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/field_trial.cc b/third_party/libwebrtc/test/field_trial.cc
new file mode 100644
index 0000000000..3d6c6ac617
--- /dev/null
+++ b/third_party/libwebrtc/test/field_trial.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/field_trial.h"
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace test {
+
+// Validates `config` (crashing the test on a malformed string) and installs
+// it as the process-global field-trial configuration, remembering whatever
+// string was active before.
+ScopedFieldTrials::ScopedFieldTrials(absl::string_view config)
+    : current_field_trials_(config),
+      previous_field_trials_(webrtc::field_trial::GetFieldTrialString()) {
+  RTC_CHECK(webrtc::field_trial::FieldTrialsStringIsValid(
+      current_field_trials_.c_str()))
+      << "Invalid field trials string: " << current_field_trials_;
+  webrtc::field_trial::InitFieldTrialsFromString(current_field_trials_.c_str());
+}
+
+// Restores the field-trial string that was active at construction time.
+// NOTE(review): assumes the previous string is still alive here — confirm
+// callers never destroy it while this object is in scope.
+ScopedFieldTrials::~ScopedFieldTrials() {
+  RTC_CHECK(
+      webrtc::field_trial::FieldTrialsStringIsValid(previous_field_trials_))
+      << "Invalid field trials string: " << previous_field_trials_;
+  webrtc::field_trial::InitFieldTrialsFromString(previous_field_trials_);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/field_trial.h b/third_party/libwebrtc/test/field_trial.h
new file mode 100644
index 0000000000..516faa0513
--- /dev/null
+++ b/third_party/libwebrtc/test/field_trial.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FIELD_TRIAL_H_
+#define TEST_FIELD_TRIAL_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+namespace test {
+
+// This class is used to override field-trial configs within specific tests.
+// After this class goes out of scope previous field trials will be restored.
+class ScopedFieldTrials {
+ public:
+  explicit ScopedFieldTrials(absl::string_view config);
+  ScopedFieldTrials(const ScopedFieldTrials&) = delete;
+  ScopedFieldTrials& operator=(const ScopedFieldTrials&) = delete;
+  ~ScopedFieldTrials();
+
+ private:
+  // Owned copy of the config installed by this object.
+  std::string current_field_trials_;
+  // Non-owning pointer to the previously-installed global string; restored
+  // in the destructor.
+  const char* previous_field_trials_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FIELD_TRIAL_H_
diff --git a/third_party/libwebrtc/test/frame_forwarder.cc b/third_party/libwebrtc/test/frame_forwarder.cc
new file mode 100644
index 0000000000..e89f753bd3
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_forwarder.cc
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/frame_forwarder.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+FrameForwarder::FrameForwarder() : sink_(nullptr) {}
+FrameForwarder::~FrameForwarder() {}
+
+// Forwards `video_frame` to the current sink, if any; drops it otherwise.
+void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) {
+  MutexLock lock(&mutex_);
+  if (sink_)
+    sink_->OnFrame(video_frame);
+}
+
+void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                                     const rtc::VideoSinkWants& wants) {
+  MutexLock lock(&mutex_);
+  AddOrUpdateSinkLocked(sink, wants);
+}
+
+// Installs or updates the single supported sink; re-registering a different
+// sink without removing the old one is a bug (DCHECK).
+void FrameForwarder::AddOrUpdateSinkLocked(
+    rtc::VideoSinkInterface<VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  RTC_DCHECK(!sink_ || sink_ == sink);
+  sink_ = sink;
+  sink_wants_ = wants;
+}
+
+void FrameForwarder::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+  MutexLock lock(&mutex_);
+  RTC_DCHECK_EQ(sink, sink_);
+  sink_ = nullptr;
+}
+
+// Returns a copy of the wants last supplied by the sink.
+rtc::VideoSinkWants FrameForwarder::sink_wants() const {
+  MutexLock lock(&mutex_);
+  return sink_wants_;
+}
+
+rtc::VideoSinkWants FrameForwarder::sink_wants_locked() const {
+  return sink_wants_;
+}
+
+bool FrameForwarder::has_sinks() const {
+  MutexLock lock(&mutex_);
+  return sink_ != nullptr;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_forwarder.h b/third_party/libwebrtc/test/frame_forwarder.h
new file mode 100644
index 0000000000..6dfba9521d
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_forwarder.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_FRAME_FORWARDER_H_
+#define TEST_FRAME_FORWARDER_H_
+
+#include "api/video/video_frame.h"
+#include "api/video/video_source_interface.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace test {
+
+// FrameForwarder can be used as an implementation
+// of rtc::VideoSourceInterface<VideoFrame> where the caller controls when
+// a frame should be forwarded to its sink.
+// Currently this implementation only support one sink.
+// All state is guarded by `mutex_`, so the class may be used from multiple
+// threads.
+class FrameForwarder : public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+  FrameForwarder();
+  ~FrameForwarder() override;
+  // Forwards `video_frame` to the registered `sink_`.
+  virtual void IncomingCapturedFrame(const VideoFrame& video_frame)
+      RTC_LOCKS_EXCLUDED(mutex_);
+  rtc::VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(mutex_);
+  bool has_sinks() const RTC_LOCKS_EXCLUDED(mutex_);
+
+ protected:
+  rtc::VideoSinkWants sink_wants_locked() const
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants)
+      RTC_LOCKS_EXCLUDED(mutex_) override;
+  void AddOrUpdateSinkLocked(rtc::VideoSinkInterface<VideoFrame>* sink,
+                             const rtc::VideoSinkWants& wants)
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink)
+      RTC_LOCKS_EXCLUDED(mutex_) override;
+
+  mutable Mutex mutex_;
+  // The single registered sink, or nullptr when none is attached.
+  rtc::VideoSinkInterface<VideoFrame>* sink_ RTC_GUARDED_BY(mutex_);
+  rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FRAME_FORWARDER_H_
diff --git a/third_party/libwebrtc/test/frame_generator.cc b/third_party/libwebrtc/test/frame_generator.cc
new file mode 100644
index 0000000000..b6f16a573d
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator.cc
@@ -0,0 +1,438 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/frame_generator.h"
+
+#include <string.h>
+
+#include <cstdint>
+#include <cstdio>
+#include <memory>
+
+#include "api/video/i010_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_rotation.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "test/frame_utils.h"
+
+namespace webrtc {
+namespace test {
+
+// Creates `num_squares` randomly-placed squares (seeded 1..num_squares so
+// the sequence is deterministic) over a `width`x`height` canvas.
+SquareGenerator::SquareGenerator(int width,
+                                 int height,
+                                 OutputType type,
+                                 int num_squares)
+    : type_(type) {
+  ChangeResolution(width, height);
+  for (int i = 0; i < num_squares; ++i) {
+    squares_.emplace_back(new Square(width, height, i + 1));
+  }
+}
+
+void SquareGenerator::ChangeResolution(size_t width, size_t height) {
+  MutexLock lock(&mutex_);
+  width_ = static_cast<int>(width);
+  height_ = static_cast<int>(height);
+  RTC_CHECK(width_ > 0);
+  RTC_CHECK(height_ > 0);
+}
+
+// Allocates an I420 buffer filled with mid-gray (Y=U=V=127).
+rtc::scoped_refptr<I420Buffer> SquareGenerator::CreateI420Buffer(int width,
+                                                                 int height) {
+  rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
+  memset(buffer->MutableDataY(), 127, height * buffer->StrideY());
+  memset(buffer->MutableDataU(), 127,
+         buffer->ChromaHeight() * buffer->StrideU());
+  memset(buffer->MutableDataV(), 127,
+         buffer->ChromaHeight() * buffer->StrideV());
+  return buffer;
+}
+
+// Produces the next frame: draws the squares into an I420 (or I420A)
+// buffer, then converts to the requested output type at the end. kI010 and
+// kNV12 are drawn in I420 first and copied, since Square::Draw only writes
+// I420/I420A planes.
+FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() {
+  MutexLock lock(&mutex_);
+
+  rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
+  switch (type_) {
+    case OutputType::kI420:
+    case OutputType::kI010:
+    case OutputType::kNV12: {
+      buffer = CreateI420Buffer(width_, height_);
+      break;
+    }
+    case OutputType::kI420A: {
+      rtc::scoped_refptr<I420Buffer> yuv_buffer =
+          CreateI420Buffer(width_, height_);
+      rtc::scoped_refptr<I420Buffer> axx_buffer =
+          CreateI420Buffer(width_, height_);
+      buffer = WrapI420ABuffer(yuv_buffer->width(), yuv_buffer->height(),
+                               yuv_buffer->DataY(), yuv_buffer->StrideY(),
+                               yuv_buffer->DataU(), yuv_buffer->StrideU(),
+                               yuv_buffer->DataV(), yuv_buffer->StrideV(),
+                               axx_buffer->DataY(), axx_buffer->StrideY(),
+                               // To keep references alive.
+                               [yuv_buffer, axx_buffer] {});
+      break;
+    }
+    default:
+      RTC_DCHECK_NOTREACHED() << "The given output format is not supported.";
+  }
+
+  for (const auto& square : squares_)
+    square->Draw(buffer);
+
+  if (type_ == OutputType::kI010) {
+    buffer = I010Buffer::Copy(*buffer->ToI420());
+  } else if (type_ == OutputType::kNV12) {
+    buffer = NV12Buffer::Copy(*buffer->ToI420());
+  }
+
+  return VideoFrameData(buffer, absl::nullopt);
+}
+
+// A square with random position, size and YUV(A) color, deterministic per
+// `seed`.
+SquareGenerator::Square::Square(int width, int height, int seed)
+    : random_generator_(seed),
+      x_(random_generator_.Rand(0, width)),
+      y_(random_generator_.Rand(0, height)),
+      length_(random_generator_.Rand(1, width > 4 ? width / 4 : 1)),
+      yuv_y_(random_generator_.Rand(0, 255)),
+      yuv_u_(random_generator_.Rand(0, 255)),
+      yuv_v_(random_generator_.Rand(0, 255)),
+      yuv_a_(random_generator_.Rand(0, 255)) {}
+
+// Moves the square a small random step (wrapping at the frame edge) and
+// paints it into `frame_buffer`. The const_cast writes are safe here: the
+// DCHECK restricts input to kI420/kI420A, for which ToI420() returns the
+// same underlying (mutable) buffer rather than a converted copy.
+void SquareGenerator::Square::Draw(
+    const rtc::scoped_refptr<VideoFrameBuffer>& frame_buffer) {
+  RTC_DCHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 ||
+             frame_buffer->type() == VideoFrameBuffer::Type::kI420A);
+  rtc::scoped_refptr<I420BufferInterface> buffer = frame_buffer->ToI420();
+  // Re-clamp the edge length in case the frame is smaller than when the
+  // square was created (e.g. after ChangeResolution).
+  int length_cap = std::min(buffer->height(), buffer->width()) / 4;
+  int length = std::min(length_, length_cap);
+  x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length);
+  y_ = (y_ + random_generator_.Rand(0, 4)) % (buffer->height() - length);
+  for (int y = y_; y < y_ + length; ++y) {
+    uint8_t* pos_y =
+        (const_cast<uint8_t*>(buffer->DataY()) + x_ + y * buffer->StrideY());
+    memset(pos_y, yuv_y_, length);
+  }
+
+  for (int y = y_; y < y_ + length; y = y + 2) {
+    uint8_t* pos_u = (const_cast<uint8_t*>(buffer->DataU()) + x_ / 2 +
+                      y / 2 * buffer->StrideU());
+    memset(pos_u, yuv_u_, length / 2);
+    uint8_t* pos_v = (const_cast<uint8_t*>(buffer->DataV()) + x_ / 2 +
+                      y / 2 * buffer->StrideV());
+    memset(pos_v, yuv_v_, length / 2);
+  }
+
+  if (frame_buffer->type() == VideoFrameBuffer::Type::kI420)
+    return;
+
+  // Optionally draw on alpha plane if given.
+  const webrtc::I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A();
+  for (int y = y_; y < y_ + length; ++y) {
+    uint8_t* pos_y = (const_cast<uint8_t*>(yuva_buffer->DataA()) + x_ +
+                      y * yuva_buffer->StrideA());
+    memset(pos_y, yuv_a_, length);
+  }
+}
+
+// Reads raw I420 frames of `width`x`height` from `files`, showing each frame
+// `frame_repeat_count` times and cycling through the files endlessly. Takes
+// ownership of the FILE*s (closed in the destructor). frame_index_ starts at
+// SIZE_MAX so the first ReadNextFrame() always reports a new frame.
+YuvFileGenerator::YuvFileGenerator(std::vector<FILE*> files,
+                                   size_t width,
+                                   size_t height,
+                                   int frame_repeat_count)
+    : file_index_(0),
+      frame_index_(std::numeric_limits<size_t>::max()),
+      files_(files),
+      width_(width),
+      height_(height),
+      frame_size_(CalcBufferSize(VideoType::kI420,
+                                 static_cast<int>(width_),
+                                 static_cast<int>(height_))),
+      frame_buffer_(new uint8_t[frame_size_]),
+      frame_display_count_(frame_repeat_count),
+      current_display_count_(0) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GT(frame_repeat_count, 0);
+}
+
+YuvFileGenerator::~YuvFileGenerator() {
+  for (FILE* file : files_)
+    fclose(file);
+}
+
+// Returns the current frame, loading a new one from file every
+// frame_display_count_ calls. The update rect is full-frame only when a new
+// frame was actually read.
+FrameGeneratorInterface::VideoFrameData YuvFileGenerator::NextFrame() {
+  // Empty update by default.
+  VideoFrame::UpdateRect update_rect{0, 0, 0, 0};
+  if (current_display_count_ == 0) {
+    const bool got_new_frame = ReadNextFrame();
+    // Full update on a new frame from file.
+    if (got_new_frame) {
+      update_rect = VideoFrame::UpdateRect{0, 0, static_cast<int>(width_),
+                                           static_cast<int>(height_)};
+    }
+  }
+  if (++current_display_count_ >= frame_display_count_)
+    current_display_count_ = 0;
+
+  return VideoFrameData(last_read_buffer_, update_rect);
+}
+
+bool YuvFileGenerator::ReadNextFrame() {
+  size_t prev_frame_index = frame_index_;
+  size_t prev_file_index = file_index_;
+  last_read_buffer_ = test::ReadI420Buffer(
+      static_cast<int>(width_), static_cast<int>(height_), files_[file_index_]);
+  ++frame_index_;
+  if (!last_read_buffer_) {
+    // No more frames to read in this file, rewind and move to next file.
+    rewind(files_[file_index_]);
+
+    frame_index_ = 0;
+    file_index_ = (file_index_ + 1) % files_.size();
+    last_read_buffer_ =
+        test::ReadI420Buffer(static_cast<int>(width_),
+                             static_cast<int>(height_), files_[file_index_]);
+    RTC_CHECK(last_read_buffer_);
+  }
+  return frame_index_ != prev_frame_index || file_index_ != prev_file_index;
+}
+
+// NV12 counterpart of YuvFileGenerator: same file-cycling and repeat logic,
+// but reads NV12-layout frames. Takes ownership of the FILE*s.
+NV12FileGenerator::NV12FileGenerator(std::vector<FILE*> files,
+                                     size_t width,
+                                     size_t height,
+                                     int frame_repeat_count)
+    : file_index_(0),
+      frame_index_(std::numeric_limits<size_t>::max()),
+      files_(files),
+      width_(width),
+      height_(height),
+      frame_size_(CalcBufferSize(VideoType::kNV12,
+                                 static_cast<int>(width_),
+                                 static_cast<int>(height_))),
+      frame_buffer_(new uint8_t[frame_size_]),
+      frame_display_count_(frame_repeat_count),
+      current_display_count_(0) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GT(frame_repeat_count, 0);
+}
+
+NV12FileGenerator::~NV12FileGenerator() {
+  for (FILE* file : files_)
+    fclose(file);
+}
+
+// See YuvFileGenerator::NextFrame(); identical logic for NV12 buffers.
+FrameGeneratorInterface::VideoFrameData NV12FileGenerator::NextFrame() {
+  // Empty update by default.
+  VideoFrame::UpdateRect update_rect{0, 0, 0, 0};
+  if (current_display_count_ == 0) {
+    const bool got_new_frame = ReadNextFrame();
+    // Full update on a new frame from file.
+    if (got_new_frame) {
+      update_rect = VideoFrame::UpdateRect{0, 0, static_cast<int>(width_),
+                                           static_cast<int>(height_)};
+    }
+  }
+  if (++current_display_count_ >= frame_display_count_)
+    current_display_count_ = 0;
+
+  return VideoFrameData(last_read_buffer_, update_rect);
+}
+
+bool NV12FileGenerator::ReadNextFrame() {
+  size_t prev_frame_index = frame_index_;
+  size_t prev_file_index = file_index_;
+  last_read_buffer_ = test::ReadNV12Buffer(
+      static_cast<int>(width_), static_cast<int>(height_), files_[file_index_]);
+  ++frame_index_;
+  if (!last_read_buffer_) {
+    // No more frames to read in this file, rewind and move to next file.
+    rewind(files_[file_index_]);
+
+    frame_index_ = 0;
+    file_index_ = (file_index_ + 1) % files_.size();
+    last_read_buffer_ =
+        test::ReadNV12Buffer(static_cast<int>(width_),
+                             static_cast<int>(height_), files_[file_index_]);
+    RTC_CHECK(last_read_buffer_);
+  }
+  return frame_index_ != prev_frame_index || file_index_ != prev_file_index;
+}
+
+// Generates synthetic "slides": each generated frame is a gray canvas
+// scattered with random squares; the same frame is repeated
+// `frame_repeat_count` times before a new one is generated. Fixed RNG seed
+// keeps the sequence deterministic.
+SlideGenerator::SlideGenerator(int width, int height, int frame_repeat_count)
+    : width_(width),
+      height_(height),
+      frame_display_count_(frame_repeat_count),
+      current_display_count_(0),
+      random_generator_(1234) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GT(frame_repeat_count, 0);
+}
+
+FrameGeneratorInterface::VideoFrameData SlideGenerator::NextFrame() {
+  if (current_display_count_ == 0)
+    GenerateNewFrame();
+  if (++current_display_count_ >= frame_display_count_)
+    current_display_count_ = 0;
+
+  return VideoFrameData(buffer_, absl::nullopt);
+}
+
+void SlideGenerator::GenerateNewFrame() {
+  // The squares should have a varying order of magnitude in order
+  // to simulate variation in the slides' complexity.
+  const int kSquareNum = 1 << (4 + (random_generator_.Rand(0, 3) * 2));
+
+  // Start from a fresh mid-gray canvas each slide.
+  buffer_ = I420Buffer::Create(width_, height_);
+  memset(buffer_->MutableDataY(), 127, height_ * buffer_->StrideY());
+  memset(buffer_->MutableDataU(), 127,
+         buffer_->ChromaHeight() * buffer_->StrideU());
+  memset(buffer_->MutableDataV(), 127,
+         buffer_->ChromaHeight() * buffer_->StrideV());
+
+  for (int i = 0; i < kSquareNum; ++i) {
+    int length = random_generator_.Rand(1, width_ > 4 ? width_ / 4 : 1);
+    // Limit the length of later squares so that they don't overwrite the
+    // previous ones too much.
+    length = (length * (kSquareNum - i)) / kSquareNum;
+
+    int x = random_generator_.Rand(0, width_ - length);
+    int y = random_generator_.Rand(0, height_ - length);
+    uint8_t yuv_y = random_generator_.Rand(0, 255);
+    uint8_t yuv_u = random_generator_.Rand(0, 255);
+    uint8_t yuv_v = random_generator_.Rand(0, 255);
+
+    for (int yy = y; yy < y + length; ++yy) {
+      uint8_t* pos_y = (buffer_->MutableDataY() + x + yy * buffer_->StrideY());
+      memset(pos_y, yuv_y, length);
+    }
+    for (int yy = y; yy < y + length; yy += 2) {
+      uint8_t* pos_u =
+          (buffer_->MutableDataU() + x / 2 + yy / 2 * buffer_->StrideU());
+      memset(pos_u, yuv_u, length / 2);
+      uint8_t* pos_v =
+          (buffer_->MutableDataV() + x / 2 + yy / 2 * buffer_->StrideV());
+      memset(pos_v, yuv_v, length / 2);
+    }
+  }
+}
+
+// Scrolls a target_width x target_height viewport across each source image
+// for `scroll_time_ms`, then holds still for `pause_time_ms`, cycling
+// through `files`. Source images must be at least as large as the target.
+// current_frame_num_ starts at num_frames_ - 1 so the first NextFrame()
+// call advances to source frame 0.
+ScrollingImageFrameGenerator::ScrollingImageFrameGenerator(
+    Clock* clock,
+    const std::vector<FILE*>& files,
+    size_t source_width,
+    size_t source_height,
+    size_t target_width,
+    size_t target_height,
+    int64_t scroll_time_ms,
+    int64_t pause_time_ms)
+    : clock_(clock),
+      start_time_(clock->TimeInMilliseconds()),
+      scroll_time_(scroll_time_ms),
+      pause_time_(pause_time_ms),
+      num_frames_(files.size()),
+      target_width_(static_cast<int>(target_width)),
+      target_height_(static_cast<int>(target_height)),
+      current_frame_num_(num_frames_ - 1),
+      prev_frame_not_scrolled_(false),
+      current_source_frame_(nullptr, absl::nullopt),
+      current_frame_(nullptr, absl::nullopt),
+      file_generator_(files, source_width, source_height, 1) {
+  RTC_DCHECK(clock_ != nullptr);
+  RTC_DCHECK_GT(num_frames_, 0);
+  RTC_DCHECK_GE(source_height, target_height);
+  RTC_DCHECK_GE(source_width, target_width);
+  RTC_DCHECK_GE(scroll_time_ms, 0);
+  RTC_DCHECK_GE(pause_time_ms, 0);
+  RTC_DCHECK_GT(scroll_time_ms + pause_time_ms, 0);
+}
+
+// Selects the source image and scroll offset from wall-clock time: each
+// source frame is on screen for scroll_time_ + pause_time_ ms, scrolling
+// linearly during the first scroll_time_ ms and then holding still.
+FrameGeneratorInterface::VideoFrameData
+ScrollingImageFrameGenerator::NextFrame() {
+  const int64_t kFrameDisplayTime = scroll_time_ + pause_time_;
+  const int64_t now = clock_->TimeInMilliseconds();
+  int64_t ms_since_start = now - start_time_;
+
+  size_t frame_num = (ms_since_start / kFrameDisplayTime) % num_frames_;
+  UpdateSourceFrame(frame_num);
+
+  bool cur_frame_not_scrolled;
+
+  double scroll_factor;
+  int64_t time_into_frame = ms_since_start % kFrameDisplayTime;
+  if (time_into_frame < scroll_time_) {
+    scroll_factor = static_cast<double>(time_into_frame) / scroll_time_;
+    cur_frame_not_scrolled = false;
+  } else {
+    scroll_factor = 1.0;
+    cur_frame_not_scrolled = true;
+  }
+  CropSourceToScrolledImage(scroll_factor);
+
+  bool same_scroll_position =
+      prev_frame_not_scrolled_ && cur_frame_not_scrolled;
+  if (!same_scroll_position) {
+    // If scrolling is not finished yet, force full frame update.
+    current_frame_.update_rect =
+        VideoFrame::UpdateRect{0, 0, target_width_, target_height_};
+  }
+  prev_frame_not_scrolled_ = cur_frame_not_scrolled;
+
+  return current_frame_;
+}
+
+// Advances the file generator until it is positioned on `frame_num`,
+// accumulating the union of all update rects encountered along the way so
+// no changed region is lost when several frames are skipped at once.
+void ScrollingImageFrameGenerator::UpdateSourceFrame(size_t frame_num) {
+  VideoFrame::UpdateRect acc_update{0, 0, 0, 0};
+  while (current_frame_num_ != frame_num) {
+    current_source_frame_ = file_generator_.NextFrame();
+    if (current_source_frame_.update_rect) {
+      acc_update.Union(*current_source_frame_.update_rect);
+    }
+    current_frame_num_ = (current_frame_num_ + 1) % num_frames_;
+  }
+  current_source_frame_.update_rect = acc_update;
+}
+
+// Produces current_frame_ as a zero-copy crop of the source image: the
+// viewport origin is `scroll_factor` (0..1) of the way across the unused
+// margin in each dimension. Chroma offsets use half resolution, so odd
+// pixel offsets are effectively rounded down to even chroma positions.
+void ScrollingImageFrameGenerator::CropSourceToScrolledImage(
+    double scroll_factor) {
+  int scroll_margin_x = current_source_frame_.buffer->width() - target_width_;
+  int pixels_scrolled_x =
+      static_cast<int>(scroll_margin_x * scroll_factor + 0.5);
+  int scroll_margin_y = current_source_frame_.buffer->height() - target_height_;
+  int pixels_scrolled_y =
+      static_cast<int>(scroll_margin_y * scroll_factor + 0.5);
+
+  rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+      current_source_frame_.buffer->ToI420();
+  int offset_y =
+      (i420_buffer->StrideY() * pixels_scrolled_y) + pixels_scrolled_x;
+  int offset_u = (i420_buffer->StrideU() * (pixels_scrolled_y / 2)) +
+                 (pixels_scrolled_x / 2);
+  int offset_v = (i420_buffer->StrideV() * (pixels_scrolled_y / 2)) +
+                 (pixels_scrolled_x / 2);
+
+  // Propagate "source changed" as a full-frame update on the cropped frame.
+  VideoFrame::UpdateRect update_rect =
+      current_source_frame_.update_rect->IsEmpty()
+          ? VideoFrame::UpdateRect{0, 0, 0, 0}
+          : VideoFrame::UpdateRect{0, 0, target_width_, target_height_};
+  current_frame_ = VideoFrameData(
+      WrapI420Buffer(target_width_, target_height_,
+                     &i420_buffer->DataY()[offset_y], i420_buffer->StrideY(),
+                     &i420_buffer->DataU()[offset_u], i420_buffer->StrideU(),
+                     &i420_buffer->DataV()[offset_v], i420_buffer->StrideV(),
+                     // To keep reference alive.
+                     [i420_buffer] {}),
+      update_rect);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_generator.h b/third_party/libwebrtc/test/frame_generator.h
new file mode 100644
index 0000000000..9a8f08cea6
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator.h
@@ -0,0 +1,197 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_FRAME_GENERATOR_H_
+#define TEST_FRAME_GENERATOR_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/scoped_refptr.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_source_interface.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/random.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace test {
+
+// SquareGenerator is a FrameGenerator that draws a given amount of randomly
+// sized and colored squares. Between each new generated frame, the squares
+// are moved slightly towards the lower right corner.
+class SquareGenerator : public FrameGeneratorInterface {
+ public:
+  SquareGenerator(int width, int height, OutputType type, int num_squares);
+
+  void ChangeResolution(size_t width, size_t height) override;
+  VideoFrameData NextFrame() override;
+
+ private:
+  // Allocates an I420 buffer pre-filled with mid-gray.
+  rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height);
+
+  // One randomly placed/colored square; Draw() moves and paints it.
+  class Square {
+   public:
+    Square(int width, int height, int seed);
+
+    void Draw(const rtc::scoped_refptr<VideoFrameBuffer>& frame_buffer);
+
+   private:
+    Random random_generator_;
+    int x_;
+    int y_;
+    const int length_;
+    const uint8_t yuv_y_;
+    const uint8_t yuv_u_;
+    const uint8_t yuv_v_;
+    const uint8_t yuv_a_;
+  };
+
+  Mutex mutex_;
+  const OutputType type_;
+  int width_ RTC_GUARDED_BY(&mutex_);
+  int height_ RTC_GUARDED_BY(&mutex_);
+  std::vector<std::unique_ptr<Square>> squares_ RTC_GUARDED_BY(&mutex_);
+};
+
+// Reads raw I420 frames from a set of files, repeating each frame
+// `frame_repeat_count` times and looping over the files. Takes ownership of
+// the FILE*s.
+class YuvFileGenerator : public FrameGeneratorInterface {
+ public:
+  YuvFileGenerator(std::vector<FILE*> files,
+                   size_t width,
+                   size_t height,
+                   int frame_repeat_count);
+
+  ~YuvFileGenerator();
+
+  VideoFrameData NextFrame() override;
+  void ChangeResolution(size_t width, size_t height) override {
+    RTC_LOG(LS_WARNING) << "YuvFileGenerator::ChangeResolution not implemented";
+  }
+
+ private:
+  // Returns true if the new frame was loaded.
+  // False only in case of a single file with a single frame in it.
+  bool ReadNextFrame();
+
+  size_t file_index_;
+  size_t frame_index_;
+  const std::vector<FILE*> files_;
+  const size_t width_;
+  const size_t height_;
+  const size_t frame_size_;
+  const std::unique_ptr<uint8_t[]> frame_buffer_;
+  const int frame_display_count_;
+  int current_display_count_;
+  rtc::scoped_refptr<I420Buffer> last_read_buffer_;
+};
+
+// NV12 counterpart of YuvFileGenerator; identical file-cycling semantics.
+class NV12FileGenerator : public FrameGeneratorInterface {
+ public:
+  NV12FileGenerator(std::vector<FILE*> files,
+                    size_t width,
+                    size_t height,
+                    int frame_repeat_count);
+
+  ~NV12FileGenerator();
+
+  VideoFrameData NextFrame() override;
+  void ChangeResolution(size_t width, size_t height) override {
+    RTC_LOG(LS_WARNING)
+        << "NV12FileGenerator::ChangeResolution not implemented";
+  }
+
+ private:
+  // Returns true if the new frame was loaded.
+  // False only in case of a single file with a single frame in it.
+  bool ReadNextFrame();
+
+  size_t file_index_;
+  size_t frame_index_;
+  const std::vector<FILE*> files_;
+  const size_t width_;
+  const size_t height_;
+  const size_t frame_size_;
+  const std::unique_ptr<uint8_t[]> frame_buffer_;
+  const int frame_display_count_;
+  int current_display_count_;
+  rtc::scoped_refptr<NV12Buffer> last_read_buffer_;
+};
+
+// SlideGenerator works similarly to YuvFileGenerator but it fills the frames
+// with randomly sized and colored squares instead of reading their content
+// from files.
+class SlideGenerator : public FrameGeneratorInterface {
+ public:
+  SlideGenerator(int width, int height, int frame_repeat_count);
+
+  VideoFrameData NextFrame() override;
+  void ChangeResolution(size_t width, size_t height) override {
+    RTC_LOG(LS_WARNING) << "SlideGenerator::ChangeResolution not implemented";
+  }
+
+ private:
+  // Generates some randomly sized and colored squares scattered
+  // over the frame.
+  void GenerateNewFrame();
+
+  const int width_;
+  const int height_;
+  // Number of times the same slide is returned before a new one is made.
+  const int frame_display_count_;
+  int current_display_count_;
+  Random random_generator_;
+  rtc::scoped_refptr<I420Buffer> buffer_;
+};
+
+// Scrolls a target-sized viewport over larger source images read from
+// files: scroll for scroll_time_ms, pause for pause_time_ms, then move on
+// to the next image. Position is derived from the injected Clock.
+class ScrollingImageFrameGenerator : public FrameGeneratorInterface {
+ public:
+  ScrollingImageFrameGenerator(Clock* clock,
+                               const std::vector<FILE*>& files,
+                               size_t source_width,
+                               size_t source_height,
+                               size_t target_width,
+                               size_t target_height,
+                               int64_t scroll_time_ms,
+                               int64_t pause_time_ms);
+  ~ScrollingImageFrameGenerator() override = default;
+
+  VideoFrameData NextFrame() override;
+  void ChangeResolution(size_t width, size_t height) override {
+    RTC_LOG(LS_WARNING)
+        << "ScrollingImageFrameGenerator::ChangeResolution not implemented";
+  }
+
+ private:
+  // Advances the underlying file generator to `frame_num`.
+  void UpdateSourceFrame(size_t frame_num);
+  // Crops the current source frame at the given scroll position (0..1).
+  void CropSourceToScrolledImage(double scroll_factor);
+
+  Clock* const clock_;
+  const int64_t start_time_;
+  const int64_t scroll_time_;
+  const int64_t pause_time_;
+  const size_t num_frames_;
+  const int target_width_;
+  const int target_height_;
+
+  size_t current_frame_num_;
+  bool prev_frame_not_scrolled_;
+  VideoFrameData current_source_frame_;
+  VideoFrameData current_frame_;
+  YuvFileGenerator file_generator_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FRAME_GENERATOR_H_
diff --git a/third_party/libwebrtc/test/frame_generator_capturer.cc b/third_party/libwebrtc/test/frame_generator_capturer.cc
new file mode 100644
index 0000000000..c69fca0965
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator_capturer.cc
@@ -0,0 +1,323 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/frame_generator_capturer.h"
+
+#include <algorithm>
+#include <cmath>
+#include <limits>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "api/test/create_frame_generator.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/clock.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Maps a user-supplied path spec to a concrete file path:
+//  - "name" with no extension: resolved as the bundled resource "name.yuv".
+//  - "res://name.ext": resolved as bundled resource "name", extension "ext".
+//  - anything else: returned unchanged.
+std::string TransformFilePath(std::string path) {
+  static const std::string resource_prefix = "res://";
+  // Use size_t/npos rather than int: rfind() returns npos on miss, and the
+  // original npos -> int conversion was implementation-defined.
+  size_t ext_pos = path.rfind('.');
+  if (ext_pos == std::string::npos) {
+    return test::ResourcePath(path, "yuv");
+  } else if (absl::StartsWith(path, resource_prefix)) {
+    // Strip "res://" and split into name and extension. The original code
+    // passed `ext_pos` (a position) as the substring *length*, so `name`
+    // still contained the extension, and kept the leading '.', producing an
+    // extension like ".yuv" where ResourcePath expects "yuv" (as the
+    // no-extension branch above demonstrates).
+    std::string name = path.substr(resource_prefix.length(),
+                                   ext_pos - resource_prefix.length());
+    std::string ext = path.substr(ext_pos + 1);
+    return test::ResourcePath(name, ext);
+  }
+  return path;
+}
+} // namespace
+
+// Constructs a capturer that pulls frames from `frame_generator` at
+// `target_fps` on a dedicated high-priority task queue. `clock` supplies
+// capture/NTP timestamps and must outlive this object.
+FrameGeneratorCapturer::FrameGeneratorCapturer(
+    Clock* clock,
+    std::unique_ptr<FrameGeneratorInterface> frame_generator,
+    int target_fps,
+    TaskQueueFactory& task_queue_factory)
+    : clock_(clock),
+      sending_(true),
+      sink_wants_observer_(nullptr),
+      frame_generator_(std::move(frame_generator)),
+      source_fps_(target_fps),
+      target_capture_fps_(target_fps),
+      first_frame_capture_time_(-1),  // -1 marks "no frame captured yet".
+      task_queue_(task_queue_factory.CreateTaskQueue(
+          "FrameGenCapQ",
+          TaskQueueFactory::Priority::HIGH)) {
+  RTC_DCHECK(frame_generator_);
+  RTC_DCHECK_GT(target_fps, 0);
+}
+
+FrameGeneratorCapturer::~FrameGeneratorCapturer() {
+  // Stop delivering frames; task_queue_ is the last member and is destroyed
+  // after this body, draining any in-flight capture task.
+  Stop();
+}
+
+// Creates a capturer producing frames of randomly moving squares, per
+// `config` (dimensions, pixel format, square count, framerate).
+std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
+    Clock* clock,
+    TaskQueueFactory& task_queue_factory,
+    FrameGeneratorCapturerConfig::SquaresVideo config) {
+  return std::make_unique<FrameGeneratorCapturer>(
+      clock,
+      CreateSquareFrameGenerator(config.width, config.height,
+                                 config.pixel_format, config.num_squares),
+      config.framerate, task_queue_factory);
+}
+// Creates a capturer producing generated "slides" that change every
+// `config.change_interval`; the interval is converted to a per-slide frame
+// repeat count at the configured framerate.
+std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
+    Clock* clock,
+    TaskQueueFactory& task_queue_factory,
+    FrameGeneratorCapturerConfig::SquareSlides config) {
+  return std::make_unique<FrameGeneratorCapturer>(
+      clock,
+      CreateSlideFrameGenerator(
+          config.width, config.height,
+          /*frame_repeat_count*/ config.change_interval.seconds<double>() *
+              config.framerate),
+      config.framerate, task_queue_factory);
+}
+// Creates a capturer that loops a YUV file. `config.width`/`height` must
+// match the file's actual dimensions (checked to be non-zero here).
+std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
+    Clock* clock,
+    TaskQueueFactory& task_queue_factory,
+    FrameGeneratorCapturerConfig::VideoFile config) {
+  RTC_CHECK(config.width && config.height);
+  return std::make_unique<FrameGeneratorCapturer>(
+      clock,
+      CreateFromYuvFileFrameGenerator({TransformFilePath(config.name)},
+                                      config.width, config.height,
+                                      /*frame_repeat_count*/ 1),
+      config.framerate, task_queue_factory);
+}
+
+// Creates a capturer cycling through the image files in `config.paths`.
+// If a crop is configured, a scrolling window is panned over each image;
+// otherwise each image is shown whole for `config.change_interval`.
+std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
+    Clock* clock,
+    TaskQueueFactory& task_queue_factory,
+    FrameGeneratorCapturerConfig::ImageSlides config) {
+  std::unique_ptr<FrameGeneratorInterface> slides_generator;
+  std::vector<std::string> paths = config.paths;
+  for (std::string& path : paths)
+    path = TransformFilePath(path);
+
+  if (config.crop.width || config.crop.height) {
+    // Scrolling mode: the remainder of each change interval, after the
+    // scroll animation, is spent paused on the final crop position.
+    TimeDelta pause_duration =
+        config.change_interval - config.crop.scroll_duration;
+    RTC_CHECK_GE(pause_duration, TimeDelta::Zero());
+    int crop_width = config.crop.width.value_or(config.width);
+    int crop_height = config.crop.height.value_or(config.height);
+    RTC_CHECK_LE(crop_width, config.width);
+    RTC_CHECK_LE(crop_height, config.height);
+    slides_generator = CreateScrollingInputFromYuvFilesFrameGenerator(
+        clock, paths, config.width, config.height, crop_width, crop_height,
+        config.crop.scroll_duration.ms(), pause_duration.ms());
+  } else {
+    slides_generator = CreateFromYuvFileFrameGenerator(
+        paths, config.width, config.height,
+        /*frame_repeat_count*/ config.change_interval.seconds<double>() *
+            config.framerate);
+  }
+  return std::make_unique<FrameGeneratorCapturer>(
+      clock, std::move(slides_generator), config.framerate, task_queue_factory);
+}
+
+// Dispatches to the matching Create() overload. Precedence when several
+// sub-configs are set: video_file > image_slides > squares_slides >
+// squares_video; with nothing set, a default SquaresVideo is used.
+std::unique_ptr<FrameGeneratorCapturer> FrameGeneratorCapturer::Create(
+    Clock* clock,
+    TaskQueueFactory& task_queue_factory,
+    const FrameGeneratorCapturerConfig& config) {
+  if (config.video_file) {
+    return Create(clock, task_queue_factory, *config.video_file);
+  } else if (config.image_slides) {
+    return Create(clock, task_queue_factory, *config.image_slides);
+  } else if (config.squares_slides) {
+    return Create(clock, task_queue_factory, *config.squares_slides);
+  } else {
+    return Create(clock, task_queue_factory,
+                  config.squares_video.value_or(
+                      FrameGeneratorCapturerConfig::SquaresVideo()));
+  }
+}
+
+// Sets the rotation metadata stamped on all subsequently captured frames.
+void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) {
+  MutexLock lock(&lock_);
+  fake_rotation_ = rotation;
+}
+
+// Sets the color-space metadata stamped on all subsequently captured frames;
+// nullopt clears it.
+void FrameGeneratorCapturer::SetFakeColorSpace(
+    absl::optional<ColorSpace> color_space) {
+  MutexLock lock(&lock_);
+  fake_color_space_ = color_space;
+}
+
+// Starts the repeating capture task on the task queue. Returns false if the
+// frame generator could not be created.
+bool FrameGeneratorCapturer::Init() {
+  // This check is added because frame_generator_ might be file based and should
+  // not crash because a file moved.
+  if (frame_generator_.get() == nullptr)
+    return false;
+
+  frame_task_ = RepeatingTaskHandle::DelayedStart(
+      task_queue_.Get(),
+      TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(),
+      [this] {
+        InsertFrame();
+        // The interval is re-evaluated on every iteration so framerate
+        // changes take effect from the next frame on.
+        return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate();
+      },
+      TaskQueueBase::DelayPrecision::kHigh);
+  return true;
+}
+
+// Pulls the next frame from the generator, stamps capture metadata and
+// forwards it to the sinks. No-op while sending_ is false (see Stop()).
+// Runs on task_queue_.
+void FrameGeneratorCapturer::InsertFrame() {
+  absl::optional<Resolution> resolution;
+
+  {
+    MutexLock lock(&lock_);
+    if (sending_) {
+      FrameGeneratorInterface::VideoFrameData frame_data =
+          frame_generator_->NextFrame();
+      // TODO(srte): Use more advanced frame rate control to allow arbitrary
+      // fractions.
+      // Capture rates below the source rate are approximated by consuming
+      // (and discarding) extra generated frames.
+      int decimation =
+          std::round(static_cast<double>(source_fps_) / target_capture_fps_);
+      for (int i = 1; i < decimation; ++i)
+        frame_data = frame_generator_->NextFrame();
+
+      VideoFrame frame =
+          VideoFrame::Builder()
+              .set_video_frame_buffer(frame_data.buffer)
+              .set_rotation(fake_rotation_)
+              .set_timestamp_us(clock_->TimeInMicroseconds())
+              .set_ntp_time_ms(clock_->CurrentNtpInMilliseconds())
+              .set_update_rect(frame_data.update_rect)
+              .set_color_space(fake_color_space_)
+              .build();
+      if (first_frame_capture_time_ == -1) {
+        first_frame_capture_time_ = frame.ntp_time_ms();
+      }
+
+      resolution = Resolution{frame.width(), frame.height()};
+
+      TestVideoCapturer::OnFrame(frame);
+    }
+  }
+
+  // Published under a separate lock so GetResolution() never contends with
+  // frame generation.
+  if (resolution) {
+    MutexLock lock(&stats_lock_);
+    source_resolution_ = resolution;
+  }
+}
+
+// Returns the resolution of the most recently captured frame, or nullopt if
+// no frame has been captured yet.
+absl::optional<FrameGeneratorCapturer::Resolution>
+FrameGeneratorCapturer::GetResolution() {
+  MutexLock lock(&stats_lock_);
+  return source_resolution_;
+}
+
+// Resumes frame delivery, (re)starting the repeating capture task if it is
+// not already running (e.g. when Init() was never called).
+void FrameGeneratorCapturer::Start() {
+  {
+    MutexLock lock(&lock_);
+    sending_ = true;
+  }
+  if (!frame_task_.Running()) {
+    frame_task_ = RepeatingTaskHandle::Start(
+        task_queue_.Get(),
+        [this] {
+          InsertFrame();
+          return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate();
+        },
+        TaskQueueBase::DelayPrecision::kHigh);
+  }
+}
+
+// Pauses frame delivery. The repeating task keeps ticking; InsertFrame()
+// simply becomes a no-op while sending_ is false.
+void FrameGeneratorCapturer::Stop() {
+  MutexLock lock(&lock_);
+  sending_ = false;
+}
+
+// Forwards a resolution change to the underlying generator (not all
+// generators support this; unsupported ones log and ignore it).
+void FrameGeneratorCapturer::ChangeResolution(size_t width, size_t height) {
+  MutexLock lock(&lock_);
+  frame_generator_->ChangeResolution(width, height);
+}
+
+// Sets the capture rate to `target_framerate`, clamped to the source rate.
+// The source rate stays fixed; lower capture rates are approximated by
+// decimation (see InsertFrame()).
+void FrameGeneratorCapturer::ChangeFramerate(int target_framerate) {
+  MutexLock lock(&lock_);
+  RTC_CHECK(target_capture_fps_ > 0);
+  if (target_framerate > source_fps_)
+    RTC_LOG(LS_WARNING) << "Target framerate clamped from " << target_framerate
+                        << " to " << source_fps_;
+  // NOTE(review): this diagnostic is computed from the OLD target_capture_fps_
+  // (it is only updated below), and `effective_rate` divides the old target
+  // by the decimation — presumably `source_fps_ / decimation` was intended.
+  // Log-message-only impact; confirm against upstream before changing.
+  if (source_fps_ % target_capture_fps_ != 0) {
+    int decimation =
+        std::round(static_cast<double>(source_fps_) / target_capture_fps_);
+    int effective_rate = target_capture_fps_ / decimation;
+    RTC_LOG(LS_WARNING) << "Target framerate, " << target_framerate
+                        << ", is an uneven fraction of the source rate, "
+                        << source_fps_
+                        << ". The framerate will be :" << effective_rate;
+  }
+  target_capture_fps_ = std::min(source_fps_, target_framerate);
+}
+
+// Requests that delivered frames be adapted to at most `width`x`height` and
+// `max_fps`; simply forwards to the base class adapter.
+void FrameGeneratorCapturer::OnOutputFormatRequest(
+    int width,
+    int height,
+    const absl::optional<int>& max_fps) {
+  TestVideoCapturer::OnOutputFormatRequest(width, height, max_fps);
+}
+
+// Registers the (single) observer notified from AddOrUpdateSink(). May only
+// be set once; `observer` must outlive this capturer.
+void FrameGeneratorCapturer::SetSinkWantsObserver(SinkWantsObserver* observer) {
+  MutexLock lock(&lock_);
+  RTC_DCHECK(!sink_wants_observer_);
+  sink_wants_observer_ = observer;
+}
+
+// Adds or updates `sink`, notifies the observer (if any) with the raw wants,
+// and re-derives the capture framerate from the aggregated sink wants.
+void FrameGeneratorCapturer::AddOrUpdateSink(
+    rtc::VideoSinkInterface<VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  TestVideoCapturer::AddOrUpdateSink(sink, wants);
+  MutexLock lock(&lock_);
+  if (sink_wants_observer_) {
+    // Tests need to observe unmodified sink wants.
+    sink_wants_observer_->OnSinkWantsChanged(sink, wants);
+  }
+  UpdateFps(GetSinkWants().max_framerate_fps);
+}
+
+// Removes `sink` and re-derives the capture framerate from the remaining
+// sinks' aggregated wants.
+void FrameGeneratorCapturer::RemoveSink(
+    rtc::VideoSinkInterface<VideoFrame>* sink) {
+  TestVideoCapturer::RemoveSink(sink);
+
+  MutexLock lock(&lock_);
+  UpdateFps(GetSinkWants().max_framerate_fps);
+}
+
+// Records `max_fps` as the sink-imposed cap when it is below the configured
+// target, otherwise clears the cap. Caller must hold lock_.
+void FrameGeneratorCapturer::UpdateFps(int max_fps) {
+  if (max_fps < target_capture_fps_) {
+    wanted_fps_.emplace(max_fps);
+  } else {
+    wanted_fps_.reset();
+  }
+}
+
+// Captures a single extra frame out of band, independent of the repeating
+// capture schedule.
+void FrameGeneratorCapturer::ForceFrame() {
+  // One-time non-repeating task.
+  task_queue_.PostTask([this] { InsertFrame(); });
+}
+
+// Returns the effective capture rate: the configured target, further capped
+// by the sink-imposed limit when one is in effect.
+int FrameGeneratorCapturer::GetCurrentConfiguredFramerate() {
+  MutexLock lock(&lock_);
+  if (wanted_fps_ && *wanted_fps_ < target_capture_fps_)
+    return *wanted_fps_;
+  return target_capture_fps_;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_generator_capturer.h b/third_party/libwebrtc/test/frame_generator_capturer.h
new file mode 100644
index 0000000000..e310e40129
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator_capturer.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_FRAME_GENERATOR_CAPTURER_H_
+#define TEST_FRAME_GENERATOR_CAPTURER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "system_wrappers/include/clock.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+
+namespace test {
+namespace frame_gen_cap_impl {
+// absl::optional<T> that default-constructs its value on operator-> access,
+// letting callers write e.g. `config.squares_video->width = ...` without
+// first emplacing the optional.
+template <typename T>
+class AutoOpt : public absl::optional<T> {
+ public:
+  using absl::optional<T>::optional;
+  T* operator->() {
+    if (!absl::optional<T>::has_value())
+      this->emplace(T());
+    return absl::optional<T>::operator->();
+  }
+};
+} // namespace frame_gen_cap_impl
+// Configuration for FrameGeneratorCapturer::Create(). At most one of the
+// four sub-configs is normally set; see the Create() overload taking the
+// whole struct for the precedence when several are populated.
+struct FrameGeneratorCapturerConfig {
+  // Randomly moving squares (fully synthetic content).
+  struct SquaresVideo {
+    int framerate = 30;
+    FrameGeneratorInterface::OutputType pixel_format =
+        FrameGeneratorInterface::OutputType::kI420;
+    int width = 320;
+    int height = 180;
+    int num_squares = 10;
+  };
+
+  // Randomly generated slides, each shown for `change_interval`.
+  struct SquareSlides {
+    int framerate = 30;
+    TimeDelta change_interval = TimeDelta::Seconds(10);
+    int width = 1600;
+    int height = 1200;
+  };
+
+  // Loops a raw YUV file.
+  struct VideoFile {
+    int framerate = 30;
+    std::string name;
+    // Must be set to width and height of the source video file.
+    int width = 0;
+    int height = 0;
+  };
+
+  // Cycles through still images, optionally panning a crop window over them.
+  struct ImageSlides {
+    int framerate = 30;
+    TimeDelta change_interval = TimeDelta::Seconds(10);
+    // When width/height are set, a window of that size scrolls over each
+    // image for `scroll_duration`, then pauses for the rest of the interval.
+    struct Crop {
+      TimeDelta scroll_duration = TimeDelta::Seconds(0);
+      absl::optional<int> width;
+      absl::optional<int> height;
+    } crop;
+    int width = 1850;
+    int height = 1110;
+    // Default slide deck shipped in the test resources.
+    std::vector<std::string> paths = {
+        "web_screenshot_1850_1110",
+        "presentation_1850_1110",
+        "photo_1850_1110",
+        "difficult_photo_1850_1110",
+    };
+  };
+
+  frame_gen_cap_impl::AutoOpt<SquaresVideo> squares_video;
+  frame_gen_cap_impl::AutoOpt<SquareSlides> squares_slides;
+  frame_gen_cap_impl::AutoOpt<VideoFile> video_file;
+  frame_gen_cap_impl::AutoOpt<ImageSlides> image_slides;
+};
+
+// Test video capturer that produces frames from a FrameGeneratorInterface on
+// its own task queue at a configurable rate, stamping capture timestamps and
+// optional fake rotation/color-space metadata.
+class FrameGeneratorCapturer : public TestVideoCapturer {
+ public:
+  class SinkWantsObserver {
+   public:
+    // OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink
+    // is called.
+    virtual void OnSinkWantsChanged(rtc::VideoSinkInterface<VideoFrame>* sink,
+                                    const rtc::VideoSinkWants& wants) = 0;
+
+   protected:
+    virtual ~SinkWantsObserver() {}
+  };
+
+  FrameGeneratorCapturer(
+      Clock* clock,
+      std::unique_ptr<FrameGeneratorInterface> frame_generator,
+      int target_fps,
+      TaskQueueFactory& task_queue_factory);
+  virtual ~FrameGeneratorCapturer();
+
+  // Factory helpers, one per content type in FrameGeneratorCapturerConfig.
+  static std::unique_ptr<FrameGeneratorCapturer> Create(
+      Clock* clock,
+      TaskQueueFactory& task_queue_factory,
+      FrameGeneratorCapturerConfig::SquaresVideo config);
+  static std::unique_ptr<FrameGeneratorCapturer> Create(
+      Clock* clock,
+      TaskQueueFactory& task_queue_factory,
+      FrameGeneratorCapturerConfig::SquareSlides config);
+  static std::unique_ptr<FrameGeneratorCapturer> Create(
+      Clock* clock,
+      TaskQueueFactory& task_queue_factory,
+      FrameGeneratorCapturerConfig::VideoFile config);
+  static std::unique_ptr<FrameGeneratorCapturer> Create(
+      Clock* clock,
+      TaskQueueFactory& task_queue_factory,
+      FrameGeneratorCapturerConfig::ImageSlides config);
+  static std::unique_ptr<FrameGeneratorCapturer> Create(
+      Clock* clock,
+      TaskQueueFactory& task_queue_factory,
+      const FrameGeneratorCapturerConfig& config);
+
+  void Start();
+  void Stop();
+  void ChangeResolution(size_t width, size_t height);
+  void ChangeFramerate(int target_framerate);
+
+  struct Resolution {
+    int width;
+    int height;
+  };
+  // Resolution of the last captured frame; nullopt before the first frame.
+  absl::optional<Resolution> GetResolution();
+
+  void OnOutputFormatRequest(int width,
+                             int height,
+                             const absl::optional<int>& max_fps);
+
+  void SetSinkWantsObserver(SinkWantsObserver* observer);
+
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+
+  // Captures one extra frame immediately, outside the regular schedule.
+  void ForceFrame();
+  void SetFakeRotation(VideoRotation rotation);
+  void SetFakeColorSpace(absl::optional<ColorSpace> color_space);
+
+  // NTP time of the first captured frame, or -1 before any frame.
+  int64_t first_frame_capture_time() const { return first_frame_capture_time_; }
+
+  bool Init();
+
+ private:
+  void InsertFrame();
+  // NOTE(review): appears unused in the implementation file — candidate for
+  // removal; verify no other translation unit references it.
+  static bool Run(void* obj);
+  int GetCurrentConfiguredFramerate();
+  void UpdateFps(int max_fps) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_);
+
+  Clock* const clock_;
+  RepeatingTaskHandle frame_task_;
+  // Accessed under lock_ in the implementation despite lacking an annotation.
+  bool sending_;
+  SinkWantsObserver* sink_wants_observer_ RTC_GUARDED_BY(&lock_);
+
+  Mutex lock_;
+  std::unique_ptr<FrameGeneratorInterface> frame_generator_;
+
+  int source_fps_ RTC_GUARDED_BY(&lock_);
+  int target_capture_fps_ RTC_GUARDED_BY(&lock_);
+  // Sink-imposed framerate cap, set when below target_capture_fps_.
+  absl::optional<int> wanted_fps_ RTC_GUARDED_BY(&lock_);
+  VideoRotation fake_rotation_ = kVideoRotation_0;
+  absl::optional<ColorSpace> fake_color_space_ RTC_GUARDED_BY(&lock_);
+
+  int64_t first_frame_capture_time_;
+
+  Mutex stats_lock_;
+  absl::optional<Resolution> source_resolution_ RTC_GUARDED_BY(&stats_lock_);
+
+  // Must be the last field, so it will be deconstructed first as tasks
+  // in the TaskQueue access other fields of the instance of this class.
+  rtc::TaskQueue task_queue_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FRAME_GENERATOR_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/frame_generator_capturer_unittest.cc b/third_party/libwebrtc/test/frame_generator_capturer_unittest.cc
new file mode 100644
index 0000000000..d8371f4efd
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator_capturer_unittest.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/frame_generator_capturer.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+using ::testing::Eq;
+using ::testing::Property;
+
+// Default capture dimensions used by the tests below.
+constexpr int kWidth = 640;
+constexpr int kHeight = 360;
+
+// Mock sink used to count and inspect frames delivered by the capturer.
+class MockVideoSinkInterfaceVideoFrame
+    : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override));
+  MOCK_METHOD(void, OnDiscardedFrame, (), (override));
+};
+} // namespace
+
+// A capturer built from a SquaresVideo config delivers frames of the
+// configured width: 21 deliveries over one simulated second at 20 fps
+// (one at t=0 plus one per 50 ms tick).
+TEST(FrameGeneratorCapturerTest, CreateFromConfig) {
+  GlobalSimulatedTimeController time(Timestamp::Seconds(1000));
+  FrameGeneratorCapturerConfig config;
+  config.squares_video->width = 300;
+  config.squares_video->height = 200;
+  config.squares_video->framerate = 20;
+  auto capturer = FrameGeneratorCapturer::Create(
+      time.GetClock(), *time.GetTaskQueueFactory(), config);
+  testing::StrictMock<MockVideoSinkInterfaceVideoFrame> mock_sink;
+  capturer->AddOrUpdateSink(&mock_sink, rtc::VideoSinkWants());
+  capturer->Start();
+  EXPECT_CALL(mock_sink, OnFrame(Property(&VideoFrame::width, Eq(300))))
+      .Times(21);
+  time.AdvanceTime(TimeDelta::Seconds(1));
+}
+
+// OnOutputFormatRequest() downscales delivered frames (half width here) and
+// caps the rate: 11 deliveries over one simulated second at 10 fps.
+TEST(FrameGeneratorCapturerTest, OnOutputFormatRequest) {
+  GlobalSimulatedTimeController time(Timestamp::Seconds(1000));
+  FrameGeneratorCapturerConfig config;
+  config.squares_video->width = kWidth;
+  config.squares_video->height = kHeight;
+  config.squares_video->framerate = 20;
+  auto capturer = FrameGeneratorCapturer::Create(
+      time.GetClock(), *time.GetTaskQueueFactory(), config);
+  testing::StrictMock<MockVideoSinkInterfaceVideoFrame> mock_sink;
+  capturer->AddOrUpdateSink(&mock_sink, rtc::VideoSinkWants());
+  capturer->OnOutputFormatRequest(kWidth / 2, kHeight / 2, /*max_fps=*/10);
+  capturer->Start();
+  EXPECT_CALL(mock_sink, OnFrame(Property(&VideoFrame::width, Eq(kWidth / 2))))
+      .Times(11);
+  time.AdvanceTime(TimeDelta::Seconds(1));
+}
+
+// GetResolution() is empty until the first frame, then tracks the generator's
+// resolution, including after a ChangeResolution() call.
+TEST(FrameGeneratorCapturerTest, ChangeResolution) {
+  GlobalSimulatedTimeController time(Timestamp::Seconds(1000));
+  FrameGeneratorCapturerConfig config;
+  config.squares_video->width = kWidth;
+  config.squares_video->height = kHeight;
+  config.squares_video->framerate = 20;
+  auto capturer = FrameGeneratorCapturer::Create(
+      time.GetClock(), *time.GetTaskQueueFactory(), config);
+  EXPECT_FALSE(capturer->GetResolution());
+  capturer->Start();
+  time.AdvanceTime(TimeDelta::Seconds(1));
+  ASSERT_TRUE(capturer->GetResolution());
+  EXPECT_EQ(kWidth, capturer->GetResolution()->width);
+  EXPECT_EQ(kHeight, capturer->GetResolution()->height);
+
+  capturer->ChangeResolution(kWidth / 2, kHeight / 2);
+  time.AdvanceTime(TimeDelta::Seconds(1));
+  ASSERT_TRUE(capturer->GetResolution());
+  EXPECT_EQ(kWidth / 2, capturer->GetResolution()->width);
+  EXPECT_EQ(kHeight / 2, capturer->GetResolution()->height);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_generator_unittest.cc b/third_party/libwebrtc/test/frame_generator_unittest.cc
new file mode 100644
index 0000000000..ece37a547f
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_generator_unittest.cc
@@ -0,0 +1,284 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/frame_generator.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#include <cstdint>
+#include <memory>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/video/video_frame_buffer.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace test {
+
+// Tiny 4x4 frames keep per-plane byte counts small and checks fast.
+constexpr int kFrameWidth = 4;
+constexpr int kFrameHeight = 4;
+constexpr int y_size = kFrameWidth * kFrameHeight;
+constexpr int uv_size = ((kFrameHeight + 1) / 2) * ((kFrameWidth + 1) / 2);
+
+// Fixture that writes small temporary I420 and NV12 files with known
+// constant-color frames, used to exercise the file-based frame generators.
+class FrameGeneratorTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    two_frame_yuv_filename_ =
+        test::TempFilename(test::OutputPath(), "2_frame_yuv_file");
+    one_frame_yuv_filename_ =
+        test::TempFilename(test::OutputPath(), "1_frame_yuv_file");
+    two_frame_nv12_filename_ =
+        test::TempFilename(test::OutputPath(), "2_frame_nv12_file");
+    one_frame_nv12_filename_ =
+        test::TempFilename(test::OutputPath(), "1_frame_nv12_file");
+
+    FILE* file = fopen(two_frame_yuv_filename_.c_str(), "wb");
+    WriteYuvFile(file, 0, 0, 0);
+    WriteYuvFile(file, 127, 128, 129);
+    fclose(file);
+    file = fopen(one_frame_yuv_filename_.c_str(), "wb");
+    WriteYuvFile(file, 255, 255, 255);
+    fclose(file);
+    file = fopen(two_frame_nv12_filename_.c_str(), "wb");
+    WriteNV12File(file, 0, 0, 0);
+    WriteNV12File(file, 127, 128, 129);
+    fclose(file);
+    file = fopen(one_frame_nv12_filename_.c_str(), "wb");
+    WriteNV12File(file, 255, 255, 255);
+    fclose(file);
+  }
+
+  void TearDown() override {
+    remove(one_frame_yuv_filename_.c_str());
+    remove(two_frame_yuv_filename_.c_str());
+    remove(one_frame_nv12_filename_.c_str());
+    remove(two_frame_nv12_filename_.c_str());
+  }
+
+ protected:
+  // Appends one I420 frame with constant Y, U, V plane values to `file`.
+  void WriteYuvFile(FILE* file, uint8_t y, uint8_t u, uint8_t v) {
+    RTC_DCHECK(file);
+    std::unique_ptr<uint8_t[]> plane_buffer(new uint8_t[y_size]);
+    memset(plane_buffer.get(), y, y_size);
+    fwrite(plane_buffer.get(), 1, y_size, file);
+    memset(plane_buffer.get(), u, uv_size);
+    fwrite(plane_buffer.get(), 1, uv_size, file);
+    memset(plane_buffer.get(), v, uv_size);
+    fwrite(plane_buffer.get(), 1, uv_size, file);
+  }
+
+  // Appends one NV12 frame (Y plane followed by interleaved U/V samples)
+  // with constant component values to `file`.
+  void WriteNV12File(FILE* file, uint8_t y, uint8_t u, uint8_t v) {
+    RTC_DCHECK(file);
+    // 2 * uv_size <= y_size for the 4x4 frames used here, so the Y-sized
+    // buffer is large enough for the interleaved chroma plane too.
+    uint8_t plane_buffer[y_size];
+
+    memset(&plane_buffer, y, y_size);
+    fwrite(&plane_buffer, 1, y_size, file);
+    for (size_t i = 0; i < uv_size; ++i) {
+      plane_buffer[2 * i] = u;
+      plane_buffer[2 * i + 1] = v;
+    }
+    fwrite(&plane_buffer, 1, 2 * uv_size, file);
+  }
+
+  // Asserts that every sample of the frame (converted to I420) equals the
+  // expected constant Y/U/V values.
+  // NOTE(review): despite the name and comment, this neither mutates the
+  // frame nor checks its timestamps — only the pixel content is verified.
+  void CheckFrameAndMutate(const FrameGeneratorInterface::VideoFrameData& frame,
+                           uint8_t y,
+                           uint8_t u,
+                           uint8_t v) {
+    // Check that frame is valid, has the correct color and timestamp are clean.
+    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+        frame.buffer->ToI420();
+    const uint8_t* buffer;
+    buffer = i420_buffer->DataY();
+    for (int i = 0; i < y_size; ++i)
+      ASSERT_EQ(y, buffer[i]);
+    buffer = i420_buffer->DataU();
+    for (int i = 0; i < uv_size; ++i)
+      ASSERT_EQ(u, buffer[i]);
+    buffer = i420_buffer->DataV();
+    for (int i = 0; i < uv_size; ++i)
+      ASSERT_EQ(v, buffer[i]);
+  }
+
+  // Order-sensitive polynomial hash over all three I420 planes; used to
+  // detect whether two frames have identical content.
+  uint64_t Hash(const FrameGeneratorInterface::VideoFrameData& frame) {
+    // Generate a 64-bit hash from the frame's buffer.
+    uint64_t hash = 19;
+    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+        frame.buffer->ToI420();
+    const uint8_t* buffer = i420_buffer->DataY();
+    for (int i = 0; i < y_size; ++i) {
+      hash = (37 * hash) + buffer[i];
+    }
+    buffer = i420_buffer->DataU();
+    for (int i = 0; i < uv_size; ++i) {
+      hash = (37 * hash) + buffer[i];
+    }
+    buffer = i420_buffer->DataV();
+    for (int i = 0; i < uv_size; ++i) {
+      hash = (37 * hash) + buffer[i];
+    }
+    return hash;
+  }
+
+  std::string two_frame_yuv_filename_;
+  std::string one_frame_yuv_filename_;
+  std::string two_frame_nv12_filename_;
+  std::string one_frame_nv12_filename_;
+};
+
+// A single-frame YUV file loops: the same frame is returned on every call.
+TEST_F(FrameGeneratorTest, SingleFrameYuvFile) {
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromYuvFileFrameGenerator(
+          std::vector<std::string>(1, one_frame_yuv_filename_), kFrameWidth,
+          kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+}
+
+// A two-frame YUV file plays its frames in order, then wraps around.
+TEST_F(FrameGeneratorTest, TwoFrameYuvFile) {
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromYuvFileFrameGenerator(
+          std::vector<std::string>(1, two_frame_yuv_filename_), kFrameWidth,
+          kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// Multiple YUV files are concatenated in order, and the whole sequence loops.
+TEST_F(FrameGeneratorTest, MultipleFrameYuvFiles) {
+  std::vector<std::string> files;
+  files.push_back(two_frame_yuv_filename_);
+  files.push_back(one_frame_yuv_filename_);
+
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromYuvFileFrameGenerator(files, kFrameWidth, kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// With a repeat count, each file frame is returned `kRepeatCount` times
+// before the generator advances to the next frame.
+TEST_F(FrameGeneratorTest, TwoFrameYuvFileWithRepeat) {
+  const int kRepeatCount = 3;
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromYuvFileFrameGenerator(
+          std::vector<std::string>(1, two_frame_yuv_filename_), kFrameWidth,
+          kFrameHeight, kRepeatCount));
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// Frame repetition also applies across multiple concatenated YUV files.
+TEST_F(FrameGeneratorTest, MultipleFrameYuvFilesWithRepeat) {
+  const int kRepeatCount = 3;
+  std::vector<std::string> files;
+  files.push_back(two_frame_yuv_filename_);
+  files.push_back(one_frame_yuv_filename_);
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromYuvFileFrameGenerator(files, kFrameWidth, kFrameHeight,
+                                      kRepeatCount));
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// NV12 mirror of SingleFrameYuvFile: a single-frame file loops.
+TEST_F(FrameGeneratorTest, SingleFrameNV12File) {
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromNV12FileFrameGenerator(
+          std::vector<std::string>(1, one_frame_nv12_filename_), kFrameWidth,
+          kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+}
+
+// NV12 mirror of TwoFrameYuvFile: frames play in order and wrap around.
+TEST_F(FrameGeneratorTest, TwoFrameNV12File) {
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromNV12FileFrameGenerator(
+          std::vector<std::string>(1, two_frame_nv12_filename_), kFrameWidth,
+          kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// NV12 mirror of MultipleFrameYuvFiles: files concatenate, then loop.
+TEST_F(FrameGeneratorTest, MultipleFrameNV12Files) {
+  std::vector<std::string> files;
+  files.push_back(two_frame_nv12_filename_);
+  files.push_back(one_frame_nv12_filename_);
+
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromNV12FileFrameGenerator(files, kFrameWidth, kFrameHeight, 1));
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// NV12 mirror of TwoFrameYuvFileWithRepeat: each frame repeats before
+// advancing.
+TEST_F(FrameGeneratorTest, TwoFrameNV12FileWithRepeat) {
+  const int kRepeatCount = 3;
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromNV12FileFrameGenerator(
+          std::vector<std::string>(1, two_frame_nv12_filename_), kFrameWidth,
+          kFrameHeight, kRepeatCount));
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// NV12 mirror of MultipleFrameYuvFilesWithRepeat: repetition applies across
+// concatenated files.
+TEST_F(FrameGeneratorTest, MultipleFrameNV12FilesWithRepeat) {
+  const int kRepeatCount = 3;
+  std::vector<std::string> files;
+  files.push_back(two_frame_nv12_filename_);
+  files.push_back(one_frame_nv12_filename_);
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateFromNV12FileFrameGenerator(files, kFrameWidth, kFrameHeight,
+                                       kRepeatCount));
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 127, 128, 129);
+  for (int i = 0; i < kRepeatCount; ++i)
+    CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
+  CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
+}
+
+// The slide generator holds each random slide for `kRepeatCount` frames;
+// content hashes change exactly at slide boundaries.
+TEST_F(FrameGeneratorTest, SlideGenerator) {
+  const int kGenCount = 9;
+  const int kRepeatCount = 3;
+  std::unique_ptr<FrameGeneratorInterface> generator(
+      CreateSlideFrameGenerator(kFrameWidth, kFrameHeight, kRepeatCount));
+  uint64_t hashes[kGenCount];
+  for (int i = 0; i < kGenCount; ++i) {
+    hashes[i] = Hash(generator->NextFrame());
+  }
+  // Check that the buffer changes only every `kRepeatCount` frames.
+  for (int i = 1; i < kGenCount; ++i) {
+    if (i % kRepeatCount == 0) {
+      EXPECT_NE(hashes[i - 1], hashes[i]);
+    } else {
+      EXPECT_EQ(hashes[i - 1], hashes[i]);
+    }
+  }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_utils.cc b/third_party/libwebrtc/test/frame_utils.cc
new file mode 100644
index 0000000000..b280de1ad1
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_utils.cc
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/frame_utils.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+namespace test {
+
+bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride1,
+ int stride2,
+ int width,
+ int height) {
+ for (int y = 0; y < height; ++y) {
+ if (memcmp(data1, data2, width) != 0)
+ return false;
+ data1 += stride1;
+ data2 += stride2;
+ }
+ return true;
+}
+
+bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) {
+ if (f1.timestamp() != f2.timestamp() ||
+ f1.ntp_time_ms() != f2.ntp_time_ms() ||
+ f1.render_time_ms() != f2.render_time_ms()) {
+ return false;
+ }
+ return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer());
+}
+
+bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2) {
+ if (f1 == f2) {
+ return true;
+ }
+ // Exclude nullptr (except if both are nullptr, as above)
+ if (!f1 || !f2) {
+ return false;
+ }
+
+ if (f1->width() != f2->width() || f1->height() != f2->height() ||
+ f1->type() != f2->type()) {
+ return false;
+ }
+
+ rtc::scoped_refptr<webrtc::I420BufferInterface> f1_i420 = f1->ToI420();
+ rtc::scoped_refptr<webrtc::I420BufferInterface> f2_i420 = f2->ToI420();
+ return EqualPlane(f1_i420->DataY(), f2_i420->DataY(), f1_i420->StrideY(),
+ f2_i420->StrideY(), f1_i420->width(), f1_i420->height()) &&
+ EqualPlane(f1_i420->DataU(), f2_i420->DataU(), f1_i420->StrideU(),
+ f2_i420->StrideU(), f1_i420->ChromaWidth(),
+ f1_i420->ChromaHeight()) &&
+ EqualPlane(f1_i420->DataV(), f2_i420->DataV(), f1_i420->StrideV(),
+ f2_i420->StrideV(), f1_i420->ChromaWidth(),
+ f1_i420->ChromaHeight());
+}
+
+rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE* f) {
+ int half_width = (width + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> buffer(
+ // Explicit stride, no padding between rows.
+ I420Buffer::Create(width, height, width, half_width, half_width));
+ size_t size_y = static_cast<size_t>(width) * height;
+ size_t size_uv = static_cast<size_t>(half_width) * ((height + 1) / 2);
+
+ if (fread(buffer->MutableDataY(), 1, size_y, f) < size_y)
+ return nullptr;
+ if (fread(buffer->MutableDataU(), 1, size_uv, f) < size_uv)
+ return nullptr;
+ if (fread(buffer->MutableDataV(), 1, size_uv, f) < size_uv)
+ return nullptr;
+ return buffer;
+}
+
+rtc::scoped_refptr<NV12Buffer> ReadNV12Buffer(int width, int height, FILE* f) {
+ rtc::scoped_refptr<NV12Buffer> buffer(NV12Buffer::Create(width, height));
+ size_t size_y = static_cast<size_t>(width) * height;
+ size_t size_uv = static_cast<size_t>(width + width % 2) * ((height + 1) / 2);
+
+ if (fread(buffer->MutableDataY(), 1, size_y, f) < size_y)
+ return nullptr;
+ if (fread(buffer->MutableDataUV(), 1, size_uv, f) < size_uv)
+ return nullptr;
+ return buffer;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/frame_utils.h b/third_party/libwebrtc/test/frame_utils.h
new file mode 100644
index 0000000000..1f2b381afb
--- /dev/null
+++ b/third_party/libwebrtc/test/frame_utils.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_FRAME_UTILS_H_
+#define TEST_FRAME_UTILS_H_
+
+#include <stdint.h>
+
+#include "api/scoped_refptr.h"
+#include "api/video/nv12_buffer.h"
+
+namespace webrtc {
+class I420Buffer;
+class VideoFrame;
+class VideoFrameBuffer;
+namespace test {
+
+bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride1,
+ int stride2,
+ int width,
+ int height);
+
+static inline bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride,
+ int width,
+ int height) {
+ return EqualPlane(data1, data2, stride, stride, width, height);
+}
+
+bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2);
+
+bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2);
+
+rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE*);
+
+rtc::scoped_refptr<NV12Buffer> ReadNV12Buffer(int width, int height, FILE*);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FRAME_UTILS_H_
diff --git a/third_party/libwebrtc/test/function_audio_decoder_factory.h b/third_party/libwebrtc/test/function_audio_decoder_factory.h
new file mode 100644
index 0000000000..8464f3d9aa
--- /dev/null
+++ b/third_party/libwebrtc/test/function_audio_decoder_factory.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUNCTION_AUDIO_DECODER_FACTORY_H_
+#define TEST_FUNCTION_AUDIO_DECODER_FACTORY_H_
+
+#include <functional>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_format.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+// A decoder factory producing decoders by calling a supplied create function.
+class FunctionAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ explicit FunctionAudioDecoderFactory(
+ std::function<std::unique_ptr<AudioDecoder>()> create)
+ : create_([create](const SdpAudioFormat&,
+ absl::optional<AudioCodecPairId> codec_pair_id) {
+ return create();
+ }) {}
+ explicit FunctionAudioDecoderFactory(
+ std::function<std::unique_ptr<AudioDecoder>(
+ const SdpAudioFormat&,
+ absl::optional<AudioCodecPairId> codec_pair_id)> create)
+ : create_(std::move(create)) {}
+
+ // Unused by tests.
+ std::vector<AudioCodecSpec> GetSupportedDecoders() override {
+ RTC_DCHECK_NOTREACHED();
+ return {};
+ }
+
+ bool IsSupportedDecoder(const SdpAudioFormat& format) override {
+ return true;
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format,
+ absl::optional<AudioCodecPairId> codec_pair_id) override {
+ return create_(format, codec_pair_id);
+ }
+
+ private:
+ const std::function<std::unique_ptr<AudioDecoder>(
+ const SdpAudioFormat&,
+ absl::optional<AudioCodecPairId> codec_pair_id)>
+ create_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FUNCTION_AUDIO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/test/fuzzers/BUILD.gn b/third_party/libwebrtc/test/fuzzers/BUILD.gn
new file mode 100644
index 0000000000..c3576713ab
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/BUILD.gn
@@ -0,0 +1,689 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/config/features.gni")
+import("//testing/libfuzzer/fuzzer_test.gni")
+import("../../webrtc.gni")
+
+rtc_library("webrtc_fuzzer_main") {
+ sources = [ "webrtc_fuzzer_main.cc" ]
+ deps = [
+ "../../rtc_base:logging",
+ "//testing/libfuzzer:libfuzzer_main",
+ ]
+
+ # When WebRTC fuzzer tests are built on Chromium bots they need to link
+ # with Chromium's implementation of metrics, field trial, and system time.
+ if (build_with_chromium) {
+ deps += [
+ "../../../webrtc_overrides:field_trial",
+ "../../../webrtc_overrides:metrics",
+ "../../../webrtc_overrides:system_time",
+ ]
+ }
+}
+
+rtc_library("fuzz_data_helper") {
+ testonly = true
+ sources = [
+ "fuzz_data_helper.cc",
+ "fuzz_data_helper.h",
+ ]
+ deps = [
+ "../../api:array_view",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+ visibility = [ ":*" ] # Only targets in this file can depend on this.
+}
+
+set_defaults("webrtc_fuzzer_test") {
+ configs = rtc_add_configs
+ absl_deps = []
+}
+
+template("webrtc_fuzzer_test") {
+ fuzzer_test(target_name) {
+ forward_variables_from(invoker, "*")
+ deps += [
+ ":fuzz_data_helper",
+ ":webrtc_fuzzer_main",
+ ]
+ additional_configs = configs
+
+ # If absl_deps is [], no action is needed. If not [], then it needs to be
+ # converted to //third_party/abseil-cpp:absl when build_with_chromium=true
+ # otherwise it just needs to be added to deps.
+ if (absl_deps != []) {
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (build_with_chromium) {
+ deps += [ "//third_party/abseil-cpp:absl" ]
+ } else {
+ deps += absl_deps
+ }
+ }
+
+ if (!build_with_chromium && is_clang) {
+ suppressed_configs = [ "//build/config/clang:find_bad_constructs" ]
+ }
+ }
+}
+
+webrtc_fuzzer_test("h264_depacketizer_fuzzer") {
+ sources = [ "h264_depacketizer_fuzzer.cc" ]
+ deps = [ "../../modules/rtp_rtcp" ]
+ seed_corpus = "corpora/h264-depacketizer-fuzzer-corpus"
+}
+
+webrtc_fuzzer_test("vp8_depacketizer_fuzzer") {
+ sources = [ "vp8_depacketizer_fuzzer.cc" ]
+ deps = [
+ "../../api:array_view",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_video_header",
+ ]
+}
+
+webrtc_fuzzer_test("vp9_depacketizer_fuzzer") {
+ sources = [ "vp9_depacketizer_fuzzer.cc" ]
+ deps = [
+ "../../api:array_view",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_video_header",
+ ]
+}
+
+webrtc_fuzzer_test("vp8_qp_parser_fuzzer") {
+ sources = [ "vp8_qp_parser_fuzzer.cc" ]
+ deps = [
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding/",
+ ]
+}
+
+webrtc_fuzzer_test("vp9_qp_parser_fuzzer") {
+ sources = [ "vp9_qp_parser_fuzzer.cc" ]
+ deps = [
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding/",
+ ]
+}
+
+webrtc_fuzzer_test("h264_bitstream_parser_fuzzer") {
+ sources = [ "h264_bitstream_parser_fuzzer.cc" ]
+ deps = [
+ "../../common_video",
+ "../../modules/video_coding/",
+ ]
+}
+
+webrtc_fuzzer_test("forward_error_correction_fuzzer") {
+ sources = [ "forward_error_correction_fuzzer.cc" ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:byte_buffer",
+ ]
+}
+
+webrtc_fuzzer_test("flexfec_header_reader_fuzzer") {
+ sources = [ "flexfec_header_reader_fuzzer.cc" ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("flexfec_sender_fuzzer") {
+ sources = [ "flexfec_sender_fuzzer.cc" ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../system_wrappers",
+ ]
+}
+
+webrtc_fuzzer_test("ulpfec_header_reader_fuzzer") {
+ sources = [ "ulpfec_header_reader_fuzzer.cc" ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:fec_test_helper",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("ulpfec_generator_fuzzer") {
+ sources = [ "ulpfec_generator_fuzzer.cc" ]
+ deps = [
+ "../../modules:module_api_public",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:fec_test_helper",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ "../../rtc_base:copy_on_write_buffer",
+ "../../system_wrappers",
+ ]
+}
+
+webrtc_fuzzer_test("ulpfec_receiver_fuzzer") {
+ sources = [ "ulpfec_receiver_fuzzer.cc" ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("flexfec_receiver_fuzzer") {
+ sources = [ "flexfec_receiver_fuzzer.cc" ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("rtp_video_frame_assembler_fuzzer") {
+ sources = [ "rtp_video_frame_assembler_fuzzer.cc" ]
+ deps = [
+ "../../api/video:rtp_video_frame_assembler",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("rtcp_receiver_fuzzer") {
+ sources = [ "rtcp_receiver_fuzzer.cc" ]
+ deps = [
+ "../../modules/rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ "../../system_wrappers",
+ ]
+ seed_corpus = "corpora/rtcp-corpus"
+}
+
+webrtc_fuzzer_test("rtp_packet_fuzzer") {
+ sources = [ "rtp_packet_fuzzer.cc" ]
+ deps = [ "../../modules/rtp_rtcp:rtp_rtcp_format" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ seed_corpus = "corpora/rtp-corpus"
+}
+
+webrtc_fuzzer_test("rtp_packetizer_av1_fuzzer") {
+ sources = [ "rtp_packetizer_av1_fuzzer.cc" ]
+ deps = [
+ "../../api/video:video_frame_type",
+ "../../modules/rtp_rtcp:rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ ]
+}
+
+webrtc_fuzzer_test("congestion_controller_feedback_fuzzer") {
+ sources = [ "congestion_controller_feedback_fuzzer.cc" ]
+ deps = [
+ "../../modules/congestion_controller",
+ "../../modules/pacing",
+ "../../modules/remote_bitrate_estimator",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/functional:bind_front" ]
+}
+
+rtc_library("audio_decoder_fuzzer") {
+ testonly = true
+ sources = [
+ "audio_decoder_fuzzer.cc",
+ "audio_decoder_fuzzer.h",
+ ]
+ deps = [
+ "../../api/audio_codecs:audio_codecs_api",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+webrtc_fuzzer_test("audio_decoder_g722_fuzzer") {
+ sources = [ "audio_decoder_g722_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:g722",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") {
+ sources = [ "audio_decoder_ilbc_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:ilbc",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_opus_fuzzer") {
+ sources = [ "audio_decoder_opus_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:webrtc_opus",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_opus_redundant_fuzzer") {
+ sources = [ "audio_decoder_opus_redundant_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:webrtc_opus",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_multiopus_fuzzer") {
+ sources = [ "audio_decoder_multistream_opus_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../api/audio_codecs/opus:audio_decoder_multiopus",
+ "../../api/audio_codecs/opus:audio_decoder_opus_config",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_pcm_fuzzer") {
+ sources = [ "audio_decoder_pcm_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:g711",
+ ]
+}
+
+webrtc_fuzzer_test("audio_decoder_pcm16b_fuzzer") {
+ sources = [ "audio_decoder_pcm16b_fuzzer.cc" ]
+ deps = [
+ ":audio_decoder_fuzzer",
+ "../../modules/audio_coding:pcm16b",
+ ]
+}
+
+rtc_library("audio_encoder_fuzzer") {
+ testonly = true
+ sources = [
+ "audio_encoder_fuzzer.cc",
+ "audio_encoder_fuzzer.h",
+ ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../api:array_view",
+ "../../api/audio_codecs:audio_codecs_api",
+ "../../rtc_base:buffer",
+ "../../rtc_base:checks",
+ ]
+}
+
+webrtc_fuzzer_test("audio_encoder_opus_fuzzer") {
+ sources = [ "audio_encoder_opus_fuzzer.cc" ]
+ deps = [
+ ":audio_encoder_fuzzer",
+ "../../api/audio_codecs/opus:audio_encoder_opus",
+ "../../rtc_base:checks",
+ ]
+}
+
+webrtc_fuzzer_test("turn_unwrap_fuzzer") {
+ sources = [ "turn_unwrap_fuzzer.cc" ]
+ deps = [
+ "../../media",
+ "../../media:rtc_media_base",
+ "../../media:turn_utils",
+ ]
+}
+
+webrtc_fuzzer_test("neteq_rtp_fuzzer") {
+ sources = [ "neteq_rtp_fuzzer.cc" ]
+ deps = [
+ "../../api:array_view",
+ "../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../modules/audio_coding:neteq",
+ "../../modules/audio_coding:neteq_test_tools",
+ "../../modules/audio_coding:neteq_tools_minimal",
+ "../../modules/audio_coding:pcm16b",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ ]
+}
+
+webrtc_fuzzer_test("neteq_signal_fuzzer") {
+ sources = [ "neteq_signal_fuzzer.cc" ]
+ deps = [
+ "../../api:array_view",
+ "../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../modules/audio_coding:neteq",
+ "../../modules/audio_coding:neteq_test_tools",
+ "../../modules/audio_coding:neteq_tools_minimal",
+ "../../modules/audio_coding:pcm16b",
+ "../../rtc_base:random",
+ "../../rtc_base:safe_conversions",
+ ]
+}
+
+webrtc_fuzzer_test("residual_echo_detector_fuzzer") {
+ sources = [ "residual_echo_detector_fuzzer.cc" ]
+ deps = [
+ "../../api/audio:echo_detector_creator",
+ "../../rtc_base:checks",
+ "../../rtc_base:refcount",
+ ]
+}
+
+webrtc_fuzzer_test("sdp_parser_fuzzer") {
+ sources = [ "sdp_parser_fuzzer.cc" ]
+ deps = [
+ "../../api:libjingle_peerconnection_api",
+ "../../pc:libjingle_peerconnection",
+ ]
+ seed_corpus = "corpora/sdp-corpus"
+}
+
+if (!build_with_chromium) {
+ # This target depends on test infrastructure that can't be built
+ # with Chromium at the moment.
+ # TODO(bugs.chromium.org/12534): Make this fuzzer build in Chromium.
+
+ webrtc_fuzzer_test("sdp_integration_fuzzer") {
+ sources = [ "sdp_integration_fuzzer.cc" ]
+ deps = [
+ "../../api:libjingle_peerconnection_api",
+ "../../pc:integration_test_helpers",
+ "../../pc:libjingle_peerconnection",
+ "../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ seed_corpus = "corpora/sdp-corpus"
+ }
+}
+
+webrtc_fuzzer_test("stun_parser_fuzzer") {
+ sources = [ "stun_parser_fuzzer.cc" ]
+ deps = [
+ "../../api/transport:stun_types",
+ "../../p2p:rtc_p2p",
+ ]
+ seed_corpus = "corpora/stun-corpus"
+ dict = "corpora/stun.tokens"
+}
+
+webrtc_fuzzer_test("stun_validator_fuzzer") {
+ sources = [ "stun_validator_fuzzer.cc" ]
+ deps = [
+ "../../api/transport:stun_types",
+ "../../p2p:rtc_p2p",
+ ]
+ seed_corpus = "corpora/stun-corpus"
+ dict = "corpora/stun.tokens"
+}
+
+webrtc_fuzzer_test("pseudotcp_parser_fuzzer") {
+ sources = [ "pseudotcp_parser_fuzzer.cc" ]
+ deps = [
+ "../../p2p:rtc_p2p",
+ "../../rtc_base:threading",
+ ]
+}
+
+rtc_library("audio_processing_fuzzer_helper") {
+ testonly = true
+ sources = [
+ "audio_processing_fuzzer_helper.cc",
+ "audio_processing_fuzzer_helper.h",
+ ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../api/audio:audio_frame_api",
+ "../../modules/audio_processing",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_frame_proxies",
+ "../../rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+webrtc_fuzzer_test("audio_processing_fuzzer") {
+ sources = [ "audio_processing_configs_fuzzer.cc" ]
+ deps = [
+ ":audio_processing_fuzzer_helper",
+ "../../api:scoped_refptr",
+ "../../api/audio:aec3_factory",
+ "../../api/audio:echo_detector_creator",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../modules/audio_processing",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_buffer",
+ "../../modules/audio_processing:audioproc_test_utils",
+ "../../modules/audio_processing/aec3",
+ "../../modules/audio_processing/aec_dump",
+ "../../modules/audio_processing/aec_dump:aec_dump_impl",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:rtc_task_queue",
+ "../../rtc_base:safe_minmax",
+ "../../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+ seed_corpus = "corpora/audio_processing-corpus"
+}
+
+webrtc_fuzzer_test("audio_processing_sample_rate_fuzzer") {
+ sources = [ "audio_processing_sample_rate_fuzzer.cc" ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../api/audio:audio_frame_api",
+ "../../modules/audio_processing",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_frame_proxies",
+ "../../modules/audio_processing:audioproc_test_utils",
+ "../../rtc_base:checks",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:safe_minmax",
+ ]
+}
+
+webrtc_fuzzer_test("agc_fuzzer") {
+ sources = [ "agc_fuzzer.cc" ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../modules/audio_processing",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_buffer",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:safe_minmax",
+ ]
+
+ seed_corpus = "corpora/agc-corpus"
+}
+
+webrtc_fuzzer_test("aec3_config_json_fuzzer") {
+ sources = [ "aec3_config_json_fuzzer.cc" ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../api/audio:aec3_config",
+ "../../api/audio:aec3_config_json",
+ ]
+ dict = "//testing/libfuzzer/fuzzers/dicts/json.dict"
+ seed_corpus = "corpora/aec3-config-json-corpus"
+}
+
+webrtc_fuzzer_test("aec3_fuzzer") {
+ defines = []
+ if (apm_debug_dump) {
+ defines += [ "WEBRTC_APM_DEBUG_DUMP=1" ]
+ } else {
+ defines += [ "WEBRTC_APM_DEBUG_DUMP=0" ]
+ }
+ sources = [ "aec3_fuzzer.cc" ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../modules/audio_processing:api",
+ "../../modules/audio_processing:audio_buffer",
+ "../../modules/audio_processing/aec3",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+webrtc_fuzzer_test("comfort_noise_decoder_fuzzer") {
+ sources = [ "comfort_noise_decoder_fuzzer.cc" ]
+ deps = [
+ "../../api:array_view",
+ "../../modules/audio_coding:webrtc_cng",
+ "../../rtc_base:buffer",
+ ]
+}
+
+webrtc_fuzzer_test("rtp_depacketizer_av1_assemble_frame_fuzzer") {
+ sources = [ "rtp_depacketizer_av1_assemble_frame_fuzzer.cc" ]
+ seed_corpus = "corpora/rtp-depacketizer-av1-assemble-frame-corpus"
+ deps = [
+ ":fuzz_data_helper",
+ "../../api:array_view",
+ "../../modules/rtp_rtcp",
+ ]
+}
+
+webrtc_fuzzer_test("rtp_dependency_descriptor_fuzzer") {
+ sources = [ "rtp_dependency_descriptor_fuzzer.cc" ]
+ seed_corpus = "corpora/dependency_descriptor-corpus"
+ deps = [
+ "../../api:array_view",
+ "../../common_video/generic_frame_descriptor",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ ]
+}
+
+webrtc_fuzzer_test("rtp_video_layers_allocation_fuzzer") {
+ sources = [ "rtp_video_layers_allocation_fuzzer.cc" ]
+ seed_corpus = "corpora/video_layers_allocation-corpus"
+ deps = [
+ "../../api:array_view",
+ "../../api/video:video_layers_allocation",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../rtc_base:checks",
+ ]
+
+ # video_layers_allocation is an rtp header extension and thus can't be longer
+ # than 255 bytes on the wire.
+ libfuzzer_options = [ "max_len=255" ]
+}
+
+webrtc_fuzzer_test("rtp_frame_reference_finder_fuzzer") {
+ sources = [ "rtp_frame_reference_finder_fuzzer.cc" ]
+ deps = [
+ "../../api:rtp_packet_info",
+ "../../api:scoped_refptr",
+ "../../modules/video_coding/",
+ "../../system_wrappers",
+ ]
+}
+
+webrtc_fuzzer_test("frame_buffer2_fuzzer") {
+ sources = [ "frame_buffer2_fuzzer.cc" ]
+ deps = [
+ "../../api/task_queue",
+ "../../modules/video_coding:frame_buffer2",
+ "../../modules/video_coding/timing:timing_module",
+ "../../test:scoped_key_value_config",
+ "../time_controller:time_controller",
+ ]
+}
+
+webrtc_fuzzer_test("frame_buffer_fuzzer") {
+ sources = [ "frame_buffer_fuzzer.cc" ]
+ deps = [
+ ":fuzz_data_helper",
+ "../../api:array_view",
+ "../../api/video:encoded_frame",
+ "../../api/video:frame_buffer",
+ "../../rtc_base:rtc_numerics",
+ "../../test:scoped_key_value_config",
+ ]
+}
+
+webrtc_fuzzer_test("field_trial_fuzzer") {
+ sources = [ "field_trial_fuzzer.cc" ]
+ deps = [ "../../system_wrappers:field_trial" ]
+ seed_corpus = "corpora/field_trial-corpus"
+}
+
+webrtc_fuzzer_test("string_to_number_fuzzer") {
+ sources = [ "string_to_number_fuzzer.cc" ]
+ deps = [ "../../rtc_base:stringutils" ]
+ seed_corpus = "corpora/string_to_number-corpus"
+}
+
+webrtc_fuzzer_test("sctp_utils_fuzzer") {
+ sources = [ "sctp_utils_fuzzer.cc" ]
+ deps = [
+ "../../api:libjingle_peerconnection_api",
+ "../../pc:libjingle_peerconnection",
+ "../../pc:sctp_utils",
+ "../../rtc_base:copy_on_write_buffer",
+ ]
+}
+
+webrtc_fuzzer_test("dcsctp_socket_fuzzer") {
+ sources = [ "dcsctp_socket_fuzzer.cc" ]
+ deps = [
+ "../../net/dcsctp/fuzzers:dcsctp_fuzzers",
+ "../../net/dcsctp/public:socket",
+ "../../net/dcsctp/public:types",
+ "../../net/dcsctp/socket:dcsctp_socket",
+ "../../rtc_base:logging",
+ ]
+}
+
+webrtc_fuzzer_test("ssl_certificate_fuzzer") {
+ sources = [ "ssl_certificate_fuzzer.cc" ]
+ deps = [
+ "../:rtp_test_utils",
+ "../../rtc_base:ssl",
+ "../../rtc_base:stringutils",
+ ]
+}
+
+webrtc_fuzzer_test("vp8_replay_fuzzer") {
+ sources = [ "vp8_replay_fuzzer.cc" ]
+ deps = [ "utils:rtp_replayer" ]
+ seed_corpus = "corpora/rtpdump-corpus/vp8"
+}
+
+if (rtc_build_libvpx) {
+ webrtc_fuzzer_test("vp9_encoder_references_fuzzer") {
+ sources = [ "vp9_encoder_references_fuzzer.cc" ]
+ deps = [
+ "..:test_support",
+ "../../api:array_view",
+ "../../api:field_trials_view",
+ "../../api/video:video_frame",
+ "../../api/video_codecs:video_codecs_api",
+ "../../modules/video_coding:frame_dependencies_calculator",
+ "../../modules/video_coding:webrtc_libvpx_interface",
+ "../../modules/video_coding:webrtc_vp9",
+ "../../rtc_base:safe_compare",
+ rtc_libvpx_dir,
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ ]
+ seed_corpus = "corpora/vp9-encoder-references-corpus"
+ defines = [ "RTC_ENABLE_VP9" ]
+ }
+}
+
+webrtc_fuzzer_test("vp9_replay_fuzzer") {
+ sources = [ "vp9_replay_fuzzer.cc" ]
+ deps = [ "utils:rtp_replayer" ]
+ seed_corpus = "corpora/rtpdump-corpus/vp9"
+}
diff --git a/third_party/libwebrtc/test/fuzzers/DEPS b/third_party/libwebrtc/test/fuzzers/DEPS
new file mode 100644
index 0000000000..50b1c8adce
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+audio",
+ "+pc",
+ "+net/dcsctp",
+]
diff --git a/third_party/libwebrtc/test/fuzzers/OWNERS b/third_party/libwebrtc/test/fuzzers/OWNERS
new file mode 100644
index 0000000000..3d97816b3b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/OWNERS
@@ -0,0 +1,3 @@
+danilchap@webrtc.org
+henrik.lundin@webrtc.org
+saza@webrtc.org
diff --git a/third_party/libwebrtc/test/fuzzers/aec3_config_json_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/aec3_config_json_fuzzer.cc
new file mode 100644
index 0000000000..626350c52c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/aec3_config_json_fuzzer.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#include "api/audio/echo_canceller3_config.h"
+#include "api/audio/echo_canceller3_config_json.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000) {
+ return;
+ }
+ std::string config_json(reinterpret_cast<const char*>(data), size);
+
+ EchoCanceller3Config config;
+ bool success;
+ Aec3ConfigFromJsonString(config_json, &config, &success);
+ EchoCanceller3Config::Validate(&config);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/aec3_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/aec3_fuzzer.cc
new file mode 100644
index 0000000000..a12ca30f63
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/aec3_fuzzer.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "absl/types/optional.h"
+#include "modules/audio_processing/aec3/echo_canceller3.h"
+#include "modules/audio_processing/audio_buffer.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace {
+using SampleRate = ::webrtc::AudioProcessing::NativeRate;
+
+void PrepareAudioBuffer(int sample_rate_hz,
+ test::FuzzDataHelper* fuzz_data,
+ AudioBuffer* buffer) {
+ float* const* channels = buffer->channels_f();
+ for (size_t i = 0; i < buffer->num_channels(); ++i) {
+ for (size_t j = 0; j < buffer->num_frames(); ++j) {
+ channels[i][j] =
+ static_cast<float>(fuzz_data->ReadOrDefaultValue<int16_t>(0));
+ }
+ }
+ if (sample_rate_hz == 32000 || sample_rate_hz == 48000) {
+ buffer->SplitIntoFrequencyBands();
+ }
+}
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 200000) {
+ return;
+ }
+
+ test::FuzzDataHelper fuzz_data(rtc::ArrayView<const uint8_t>(data, size));
+
+ constexpr int kSampleRates[] = {16000, 32000, 48000};
+ const int sample_rate_hz =
+ static_cast<size_t>(fuzz_data.SelectOneOf(kSampleRates));
+
+ constexpr int kMaxNumChannels = 9;
+ const size_t num_render_channels =
+ 1 + fuzz_data.ReadOrDefaultValue<uint8_t>(0) % (kMaxNumChannels - 1);
+ const size_t num_capture_channels =
+ 1 + fuzz_data.ReadOrDefaultValue<uint8_t>(0) % (kMaxNumChannels - 1);
+
+ EchoCanceller3 aec3(EchoCanceller3Config(),
+ /*multichannel_config=*/absl::nullopt, sample_rate_hz,
+ num_render_channels, num_capture_channels);
+
+ AudioBuffer capture_audio(sample_rate_hz, num_capture_channels,
+ sample_rate_hz, num_capture_channels,
+ sample_rate_hz, num_capture_channels);
+ AudioBuffer render_audio(sample_rate_hz, num_render_channels, sample_rate_hz,
+ num_render_channels, sample_rate_hz,
+ num_render_channels);
+
+ // Fuzz frames while there is still fuzzer data.
+ while (fuzz_data.BytesLeft() > 0) {
+ bool is_capture = fuzz_data.ReadOrDefaultValue(true);
+ bool level_changed = fuzz_data.ReadOrDefaultValue(true);
+ if (is_capture) {
+ PrepareAudioBuffer(sample_rate_hz, &fuzz_data, &capture_audio);
+ aec3.ProcessCapture(&capture_audio, level_changed);
+ } else {
+ PrepareAudioBuffer(sample_rate_hz, &fuzz_data, &render_audio);
+ aec3.AnalyzeRender(&render_audio);
+ }
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/agc_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/agc_fuzzer.cc
new file mode 100644
index 0000000000..597103993e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/agc_fuzzer.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "modules/audio_processing/audio_buffer.h"
+#include "modules/audio_processing/gain_control_impl.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/numerics/safe_minmax.h"
+#include "rtc_base/thread_annotations.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace {
+
+void FillAudioBuffer(size_t sample_rate_hz,
+ test::FuzzDataHelper* fuzz_data,
+ AudioBuffer* buffer) {
+ float* const* channels = buffer->channels_f();
+ for (size_t i = 0; i < buffer->num_channels(); ++i) {
+ for (size_t j = 0; j < buffer->num_frames(); ++j) {
+ channels[i][j] =
+ static_cast<float>(fuzz_data->ReadOrDefaultValue<int16_t>(0));
+ }
+ }
+
+ if (sample_rate_hz != 16000) {
+ buffer->SplitIntoFrequencyBands();
+ }
+}
+
+// This function calls the GainControl functions that are overriden as private
+// in GainControlInterface.
+void FuzzGainControllerConfig(test::FuzzDataHelper* fuzz_data,
+ GainControl* gc) {
+ GainControl::Mode modes[] = {GainControl::Mode::kAdaptiveAnalog,
+ GainControl::Mode::kAdaptiveDigital,
+ GainControl::Mode::kFixedDigital};
+ GainControl::Mode mode = fuzz_data->SelectOneOf(modes);
+ const bool enable_limiter = fuzz_data->ReadOrDefaultValue(true);
+ // The values are capped to comply with the API of webrtc::GainControl.
+ const int analog_level_min =
+ rtc::SafeClamp<int>(fuzz_data->ReadOrDefaultValue<uint16_t>(0), 0, 65534);
+ const int analog_level_max =
+ rtc::SafeClamp<int>(fuzz_data->ReadOrDefaultValue<uint16_t>(65535),
+ analog_level_min + 1, 65535);
+ const int stream_analog_level =
+ rtc::SafeClamp<int>(fuzz_data->ReadOrDefaultValue<uint16_t>(30000),
+ analog_level_min, analog_level_max);
+ const int gain =
+ rtc::SafeClamp<int>(fuzz_data->ReadOrDefaultValue<int8_t>(30), -1, 100);
+ const int target_level_dbfs =
+ rtc::SafeClamp<int>(fuzz_data->ReadOrDefaultValue<int8_t>(15), -1, 35);
+
+ gc->set_mode(mode);
+ gc->enable_limiter(enable_limiter);
+ if (mode == GainControl::Mode::kAdaptiveAnalog) {
+ gc->set_analog_level_limits(analog_level_min, analog_level_max);
+ gc->set_stream_analog_level(stream_analog_level);
+ }
+ gc->set_compression_gain_db(gain);
+ gc->set_target_level_dbfs(target_level_dbfs);
+
+ static_cast<void>(gc->mode());
+ static_cast<void>(gc->analog_level_minimum());
+ static_cast<void>(gc->analog_level_maximum());
+ static_cast<void>(gc->stream_analog_level());
+ static_cast<void>(gc->compression_gain_db());
+ static_cast<void>(gc->stream_is_saturated());
+ static_cast<void>(gc->target_level_dbfs());
+ static_cast<void>(gc->is_limiter_enabled());
+}
+
+void FuzzGainController(test::FuzzDataHelper* fuzz_data, GainControlImpl* gci) {
+ using Rate = ::webrtc::AudioProcessing::NativeRate;
+ const Rate rate_kinds[] = {Rate::kSampleRate16kHz, Rate::kSampleRate32kHz,
+ Rate::kSampleRate48kHz};
+
+ const auto sample_rate_hz =
+ static_cast<size_t>(fuzz_data->SelectOneOf(rate_kinds));
+ const size_t samples_per_frame = sample_rate_hz / 100;
+ const size_t num_channels = fuzz_data->ReadOrDefaultValue(true) ? 2 : 1;
+
+ gci->Initialize(num_channels, sample_rate_hz);
+ FuzzGainControllerConfig(fuzz_data, gci);
+
+ // The audio buffer is used for both capture and render.
+ AudioBuffer audio(sample_rate_hz, num_channels, sample_rate_hz,
+ num_channels, sample_rate_hz, num_channels);
+
+ std::vector<int16_t> packed_render_audio(samples_per_frame);
+
+ while (fuzz_data->CanReadBytes(1)) {
+ FillAudioBuffer(sample_rate_hz, fuzz_data, &audio);
+
+ const bool stream_has_echo = fuzz_data->ReadOrDefaultValue(true);
+ gci->AnalyzeCaptureAudio(audio);
+ gci->ProcessCaptureAudio(&audio, stream_has_echo);
+
+ FillAudioBuffer(sample_rate_hz, fuzz_data, &audio);
+
+ gci->PackRenderAudioBuffer(audio, &packed_render_audio);
+ gci->ProcessRenderAudio(packed_render_audio);
+ }
+}
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 200000) {
+ return;
+ }
+ test::FuzzDataHelper fuzz_data(rtc::ArrayView<const uint8_t>(data, size));
+ auto gci = std::make_unique<GainControlImpl>();
+ FuzzGainController(&fuzz_data, gci.get());
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.cc
new file mode 100644
index 0000000000..1db332eeb5
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.cc
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+#include <limits>
+
+#include "absl/types/optional.h"
+#include "api/audio_codecs/audio_decoder.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+template <typename T, unsigned int B = sizeof(T)>
+bool ParseInt(const uint8_t** data, size_t* remaining_size, T* value) {
+ static_assert(std::numeric_limits<T>::is_integer, "Type must be an integer.");
+ static_assert(sizeof(T) <= sizeof(uint64_t),
+ "Cannot read wider than uint64_t.");
+ static_assert(B <= sizeof(T), "T must be at least B bytes wide.");
+ if (B > *remaining_size)
+ return false;
+ uint64_t val = ByteReader<uint64_t, B>::ReadBigEndian(*data);
+ *data += B;
+ *remaining_size -= B;
+ *value = static_cast<T>(val);
+ return true;
+}
+} // namespace
+
+// This function reads two bytes from the beginning of `data`, interprets them
+// as the first packet length, and reads this many bytes if available. The
+// payload is inserted into the decoder, and the process continues until no more
+// data is available. Either AudioDecoder::Decode or
+// AudioDecoder::DecodeRedundant is used, depending on the value of
+// `decode_type`.
+void FuzzAudioDecoder(DecoderFunctionType decode_type,
+ const uint8_t* data,
+ size_t size,
+ AudioDecoder* decoder,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded) {
+ const uint8_t* data_ptr = data;
+ size_t remaining_size = size;
+ size_t packet_len;
+ constexpr size_t kMaxNumFuzzedPackets = 200;
+ for (size_t num_packets = 0; num_packets < kMaxNumFuzzedPackets;
+ ++num_packets) {
+ if (!(ParseInt<size_t, 2>(&data_ptr, &remaining_size, &packet_len) &&
+ packet_len <= remaining_size)) {
+ break;
+ }
+ AudioDecoder::SpeechType speech_type;
+ switch (decode_type) {
+ case DecoderFunctionType::kNormalDecode:
+ decoder->Decode(data_ptr, packet_len, sample_rate_hz, max_decoded_bytes,
+ decoded, &speech_type);
+ break;
+ case DecoderFunctionType::kRedundantDecode:
+ decoder->DecodeRedundant(data_ptr, packet_len, sample_rate_hz,
+ max_decoded_bytes, decoded, &speech_type);
+ break;
+ }
+ data_ptr += packet_len;
+ remaining_size -= packet_len;
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.h b/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.h
new file mode 100644
index 0000000000..8c5246bb5e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_fuzzer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
+#define TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+namespace webrtc {
+
+class AudioDecoder;
+
+enum class DecoderFunctionType {
+ kNormalDecode,
+ kRedundantDecode,
+};
+
+void FuzzAudioDecoder(DecoderFunctionType decode_type,
+ const uint8_t* data,
+ size_t size,
+ AudioDecoder* decoder,
+ int sample_rate_hz,
+ size_t max_decoded_bytes,
+ int16_t* decoded);
+
+} // namespace webrtc
+
+#endif // TEST_FUZZERS_AUDIO_DECODER_FUZZER_H_
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_g722_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_g722_fuzzer.cc
new file mode 100644
index 0000000000..08599aa333
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_g722_fuzzer.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/codecs/g722/audio_decoder_g722.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000 || size < 1) {
+ return;
+ }
+
+ std::unique_ptr<AudioDecoder> dec;
+ size_t num_channels;
+ if (data[0] % 2) {
+ dec = std::make_unique<AudioDecoderG722Impl>();
+ num_channels = 1;
+ } else {
+ dec = std::make_unique<AudioDecoderG722StereoImpl>();
+ num_channels = 2;
+ }
+ // Allocate a maximum output size of 100 ms.
+ const int sample_rate_hz = dec->SampleRateHz();
+ const size_t allocated_ouput_size_samples =
+ sample_rate_hz / 10 * num_channels;
+ std::unique_ptr<int16_t[]> output =
+ std::make_unique<int16_t[]>(allocated_ouput_size_samples);
+ FuzzAudioDecoder(
+ DecoderFunctionType::kNormalDecode, data, size, dec.get(), sample_rate_hz,
+ allocated_ouput_size_samples * sizeof(int16_t), output.get());
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc
new file mode 100644
index 0000000000..8548645c63
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_ilbc_fuzzer.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000) {
+ return;
+ }
+ AudioDecoderIlbcImpl dec;
+ static const int kSampleRateHz = 8000;
+ static const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10;
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec,
+ kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc
new file mode 100644
index 0000000000..474a1cdc43
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h"
+#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+
+AudioDecoderMultiChannelOpusConfig MakeDecoderConfig(
+ int num_channels,
+ int num_streams,
+ int coupled_streams,
+ std::vector<unsigned char> channel_mapping) {
+ AudioDecoderMultiChannelOpusConfig config;
+ config.num_channels = num_channels;
+ config.num_streams = num_streams;
+ config.coupled_streams = coupled_streams;
+ config.channel_mapping = channel_mapping;
+ return config;
+}
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ const std::vector<AudioDecoderMultiChannelOpusConfig> surround_configs = {
+ MakeDecoderConfig(1, 1, 0, {0}), // Mono
+
+ MakeDecoderConfig(2, 2, 0, {0, 0}), // Copy the first (of
+ // 2) decoded streams
+ // into both output
+ // channel 0 and output
+ // channel 1. Ignore
+ // the 2nd decoded
+ // stream.
+
+ MakeDecoderConfig(4, 2, 2, {0, 1, 2, 3}), // Quad.
+ MakeDecoderConfig(6, 4, 2, {0, 4, 1, 2, 3, 5}), // 5.1
+ MakeDecoderConfig(8, 5, 3, {0, 6, 1, 2, 3, 4, 5, 7}) // 7.1
+ };
+
+ const auto config = surround_configs[data[0] % surround_configs.size()];
+ RTC_CHECK(config.IsOk());
+ std::unique_ptr<AudioDecoder> dec =
+ AudioDecoderMultiChannelOpus::MakeAudioDecoder(config);
+ RTC_CHECK(dec);
+ const int kSampleRateHz = 48000;
+ const size_t kAllocatedOuputSizeSamples =
+ 4 * kSampleRateHz / 10; // 4x100 ms, 4 times the size of the output array
+ // for the stereo Opus codec. It should be enough
+ // for 8 channels.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, dec.get(),
+ kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc
new file mode 100644
index 0000000000..a015f98b5b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_fuzzer.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ const size_t channels = (size % 2) + 1; // 1 or 2 channels.
+ AudioDecoderOpusImpl dec(channels);
+ const int kSampleRateHz = 48000;
+ const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; // 100 ms.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec,
+ kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc
new file mode 100644
index 0000000000..efcba0f35d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ const size_t channels = (size % 2) + 1; // 1 or 2 channels.
+ AudioDecoderOpusImpl dec(channels);
+ const int kSampleRateHz = 48000;
+ const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; // 100 ms.
+ int16_t output[kAllocatedOuputSizeSamples];
+ FuzzAudioDecoder(DecoderFunctionType::kRedundantDecode, data, size, &dec,
+ kSampleRateHz, sizeof(output), output);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc
new file mode 100644
index 0000000000..6e5d6e2190
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000 || size < 2) {
+ return;
+ }
+
+ int sample_rate_hz;
+ switch (data[0] % 4) {
+ case 0:
+ sample_rate_hz = 8000;
+ break;
+ case 1:
+ sample_rate_hz = 16000;
+ break;
+ case 2:
+ sample_rate_hz = 32000;
+ break;
+ case 3:
+ sample_rate_hz = 48000;
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return;
+ }
+ const size_t num_channels = data[1] % 16 + 1;
+
+ // Two first bytes of the data are used. Move forward.
+ data += 2;
+ size -= 2;
+
+ AudioDecoderPcm16B dec(sample_rate_hz, num_channels);
+ // Allocate a maximum output size of 100 ms.
+ const size_t allocated_ouput_size_samples =
+ sample_rate_hz * num_channels / 10;
+ std::unique_ptr<int16_t[]> output =
+ std::make_unique<int16_t[]>(allocated_ouput_size_samples);
+ FuzzAudioDecoder(
+ DecoderFunctionType::kNormalDecode, data, size, &dec, sample_rate_hz,
+ allocated_ouput_size_samples * sizeof(int16_t), output.get());
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm_fuzzer.cc
new file mode 100644
index 0000000000..dbfcaf9976
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_decoder_pcm_fuzzer.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
+#include "test/fuzzers/audio_decoder_fuzzer.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000 || size < 2) {
+ return;
+ }
+
+ const size_t num_channels = data[0] % 16 + 1;
+
+ std::unique_ptr<AudioDecoder> dec;
+ if (data[1] % 2) {
+ dec = std::make_unique<AudioDecoderPcmU>(num_channels);
+ } else {
+ dec = std::make_unique<AudioDecoderPcmA>(num_channels);
+ }
+
+ // Two first bytes of the data are used. Move forward.
+ data += 2;
+ size -= 2;
+
+ // Allocate a maximum output size of 100 ms.
+ const size_t allocated_ouput_size_samples =
+ dec->SampleRateHz() * num_channels / 10;
+ std::unique_ptr<int16_t[]> output =
+ std::make_unique<int16_t[]>(allocated_ouput_size_samples);
+ FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, dec.get(),
+ dec->SampleRateHz(),
+ allocated_ouput_size_samples * sizeof(int16_t),
+ output.get());
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.cc
new file mode 100644
index 0000000000..837c26df56
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fuzzers/audio_encoder_fuzzer.h"
+
+#include <cstring>
+
+#include "rtc_base/buffer.h"
+#include "rtc_base/checks.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+
+// This function reads bytes from `data_view`, interprets them as RTP timestamp
+// and input samples, and sends them for encoding. The process continues until
+// no more data is available.
+void FuzzAudioEncoder(rtc::ArrayView<const uint8_t> data_view,
+ std::unique_ptr<AudioEncoder> encoder) {
+ test::FuzzDataHelper data(data_view);
+ const size_t block_size_samples =
+ encoder->SampleRateHz() / 100 * encoder->NumChannels();
+ const size_t block_size_bytes = block_size_samples * sizeof(int16_t);
+ if (data_view.size() / block_size_bytes > 1000) {
+ // If the size of the fuzzer data is more than 1000 input blocks (i.e., more
+ // than 10 seconds), then don't fuzz at all for the fear of timing out.
+ return;
+ }
+
+ rtc::BufferT<int16_t> input_aligned(block_size_samples);
+ rtc::Buffer encoded;
+
+ // Each round in the loop below will need one block of samples + a 32-bit
+ // timestamp from the fuzzer input.
+ const size_t bytes_to_read = block_size_bytes + sizeof(uint32_t);
+ while (data.CanReadBytes(bytes_to_read)) {
+ const uint32_t timestamp = data.Read<uint32_t>();
+ auto byte_array = data.ReadByteArray(block_size_bytes);
+ // Align the data by copying to another array.
+ RTC_DCHECK_EQ(input_aligned.size() * sizeof(int16_t),
+ byte_array.size() * sizeof(uint8_t));
+ memcpy(input_aligned.data(), byte_array.data(), byte_array.size());
+ auto info = encoder->Encode(timestamp, input_aligned, &encoded);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.h b/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.h
new file mode 100644
index 0000000000..0c879df4d3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_encoder_fuzzer.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_
+#define TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "api/audio_codecs/audio_encoder.h"
+
+namespace webrtc {
+
+void FuzzAudioEncoder(rtc::ArrayView<const uint8_t> data_view,
+ std::unique_ptr<AudioEncoder> encoder);
+
+} // namespace webrtc
+
+#endif // TEST_FUZZERS_AUDIO_ENCODER_FUZZER_H_
diff --git a/third_party/libwebrtc/test/fuzzers/audio_encoder_opus_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_encoder_opus_fuzzer.cc
new file mode 100644
index 0000000000..d67e6d6067
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_encoder_opus_fuzzer.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/audio_codecs/opus/audio_encoder_opus.h"
+#include "rtc_base/checks.h"
+#include "test/fuzzers/audio_encoder_fuzzer.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ AudioEncoderOpus::Config config;
+ config.frame_size_ms = 20;
+ RTC_CHECK(config.IsOk());
+ constexpr int kPayloadType = 100;
+ FuzzAudioEncoder(
+ /*data_view=*/{data, size},
+ /*encoder=*/AudioEncoderOpus::MakeAudioEncoder(config, kPayloadType));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_processing_configs_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_processing_configs_fuzzer.cc
new file mode 100644
index 0000000000..afcb4318f9
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_processing_configs_fuzzer.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <bitset>
+#include <string>
+
+#include "absl/memory/memory.h"
+#include "api/audio/echo_canceller3_factory.h"
+#include "api/audio/echo_detector_creator.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "modules/audio_processing/aec_dump/aec_dump_factory.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/audio_processing/test/audio_processing_builder_for_testing.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/numerics/safe_minmax.h"
+#include "rtc_base/task_queue.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/fuzzers/audio_processing_fuzzer_helper.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace {
+
+const std::string kFieldTrialNames[] = {
+ "WebRTC-Audio-Agc2ForceExtraSaturationMargin",
+ "WebRTC-Audio-Agc2ForceInitialSaturationMargin",
+ "WebRTC-Aec3MinErleDuringOnsetsKillSwitch",
+ "WebRTC-Aec3ShortHeadroomKillSwitch",
+};
+
+rtc::scoped_refptr<AudioProcessing> CreateApm(test::FuzzDataHelper* fuzz_data,
+ std::string* field_trial_string,
+ rtc::TaskQueue* worker_queue) {
+ // Parse boolean values for optionally enabling different
+ // configurable public components of APM.
+ bool use_ts = fuzz_data->ReadOrDefaultValue(true);
+ bool use_red = fuzz_data->ReadOrDefaultValue(true);
+ bool use_hpf = fuzz_data->ReadOrDefaultValue(true);
+ bool use_aec3 = fuzz_data->ReadOrDefaultValue(true);
+ bool use_aec = fuzz_data->ReadOrDefaultValue(true);
+ bool use_aecm = fuzz_data->ReadOrDefaultValue(true);
+ bool use_agc = fuzz_data->ReadOrDefaultValue(true);
+ bool use_ns = fuzz_data->ReadOrDefaultValue(true);
+ bool use_agc_limiter = fuzz_data->ReadOrDefaultValue(true);
+ bool use_agc2 = fuzz_data->ReadOrDefaultValue(true);
+ bool use_agc2_adaptive_digital = fuzz_data->ReadOrDefaultValue(true);
+
+ // Read a gain value supported by GainController2::Validate().
+ const float gain_controller2_gain_db =
+ fuzz_data->ReadOrDefaultValue<uint8_t>(0) % 50;
+
+ constexpr size_t kNumFieldTrials = arraysize(kFieldTrialNames);
+ // Verify that the read data type has enough bits to fuzz the field trials.
+ using FieldTrialBitmaskType = uint64_t;
+ static_assert(kNumFieldTrials <= sizeof(FieldTrialBitmaskType) * 8,
+ "FieldTrialBitmaskType is not large enough.");
+ std::bitset<kNumFieldTrials> field_trial_bitmask(
+ fuzz_data->ReadOrDefaultValue<FieldTrialBitmaskType>(0));
+ for (size_t i = 0; i < kNumFieldTrials; ++i) {
+ if (field_trial_bitmask[i]) {
+ *field_trial_string += kFieldTrialNames[i] + "/Enabled/";
+ }
+ }
+ field_trial::InitFieldTrialsFromString(field_trial_string->c_str());
+
+ // Ignore a few bytes. Bytes from this segment will be used for
+ // future config flag changes. We assume 40 bytes is enough for
+ // configuring the APM.
+ constexpr size_t kSizeOfConfigSegment = 40;
+ RTC_DCHECK(kSizeOfConfigSegment >= fuzz_data->BytesRead());
+ static_cast<void>(
+ fuzz_data->ReadByteArray(kSizeOfConfigSegment - fuzz_data->BytesRead()));
+
+ // Filter out incompatible settings that lead to CHECK failures.
+ if ((use_aecm && use_aec) || // These settings cause CHECK failure.
+ (use_aecm && use_aec3 && use_ns) // These settings trigger webrtc:9489.
+ ) {
+ return nullptr;
+ }
+
+ std::unique_ptr<EchoControlFactory> echo_control_factory;
+ if (use_aec3) {
+ echo_control_factory.reset(new EchoCanceller3Factory());
+ }
+
+ webrtc::AudioProcessing::Config apm_config;
+ apm_config.pipeline.multi_channel_render = true;
+ apm_config.pipeline.multi_channel_capture = true;
+ apm_config.echo_canceller.enabled = use_aec || use_aecm;
+ apm_config.echo_canceller.mobile_mode = use_aecm;
+ apm_config.high_pass_filter.enabled = use_hpf;
+ apm_config.gain_controller1.enabled = use_agc;
+ apm_config.gain_controller1.enable_limiter = use_agc_limiter;
+ apm_config.gain_controller2.enabled = use_agc2;
+ apm_config.gain_controller2.fixed_digital.gain_db = gain_controller2_gain_db;
+ apm_config.gain_controller2.adaptive_digital.enabled =
+ use_agc2_adaptive_digital;
+ apm_config.noise_suppression.enabled = use_ns;
+ apm_config.transient_suppression.enabled = use_ts;
+
+ rtc::scoped_refptr<AudioProcessing> apm =
+ AudioProcessingBuilderForTesting()
+ .SetEchoControlFactory(std::move(echo_control_factory))
+ .SetEchoDetector(use_red ? CreateEchoDetector() : nullptr)
+ .SetConfig(apm_config)
+ .Create();
+
+#ifdef WEBRTC_LINUX
+ apm->AttachAecDump(AecDumpFactory::Create("/dev/null", -1, worker_queue));
+#endif
+
+ return apm;
+}
+
+TaskQueueFactory* GetTaskQueueFactory() {
+ static TaskQueueFactory* const factory =
+ CreateDefaultTaskQueueFactory().release();
+ return factory;
+}
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 400000) {
+ return;
+ }
+ test::FuzzDataHelper fuzz_data(rtc::ArrayView<const uint8_t>(data, size));
+ // This string must be in scope during execution, according to documentation
+ // for field_trial.h. Hence it's created here and not in CreateApm.
+ std::string field_trial_string = "";
+
+ rtc::TaskQueue worker_queue(GetTaskQueueFactory()->CreateTaskQueue(
+ "rtc-low-prio", rtc::TaskQueue::Priority::LOW));
+ auto apm = CreateApm(&fuzz_data, &field_trial_string, &worker_queue);
+
+ if (apm) {
+ FuzzAudioProcessing(&fuzz_data, std::move(apm));
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.cc b/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.cc
new file mode 100644
index 0000000000..5252918d77
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fuzzers/audio_processing_fuzzer_helper.h"
+
+#include <algorithm>
+#include <array>
+#include <cmath>
+#include <limits>
+
+#include "api/audio/audio_frame.h"
+#include "modules/audio_processing/include/audio_frame_proxies.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+bool ValidForApm(float x) {
+ return std::isfinite(x) && -1.0f <= x && x <= 1.0f;
+}
+
+void GenerateFloatFrame(test::FuzzDataHelper* fuzz_data,
+ int input_rate,
+ int num_channels,
+ float* const* float_frames) {
+ const int samples_per_input_channel =
+ AudioProcessing::GetFrameSize(input_rate);
+ RTC_DCHECK_LE(samples_per_input_channel, 480);
+ for (int i = 0; i < num_channels; ++i) {
+ std::fill(float_frames[i], float_frames[i] + samples_per_input_channel, 0);
+ const size_t read_bytes = sizeof(float) * samples_per_input_channel;
+ if (fuzz_data->CanReadBytes(read_bytes)) {
+ rtc::ArrayView<const uint8_t> byte_array =
+ fuzz_data->ReadByteArray(read_bytes);
+ memmove(float_frames[i], byte_array.begin(), read_bytes);
+ }
+
+ // Sanitize input.
+ for (int j = 0; j < samples_per_input_channel; ++j) {
+ if (!ValidForApm(float_frames[i][j])) {
+ float_frames[i][j] = 0.f;
+ }
+ }
+ }
+}
+
+void GenerateFixedFrame(test::FuzzDataHelper* fuzz_data,
+ int input_rate,
+ int num_channels,
+ AudioFrame* fixed_frame) {
+ const int samples_per_input_channel =
+ AudioProcessing::GetFrameSize(input_rate);
+
+ fixed_frame->samples_per_channel_ = samples_per_input_channel;
+ fixed_frame->sample_rate_hz_ = input_rate;
+ fixed_frame->num_channels_ = num_channels;
+
+ RTC_DCHECK_LE(samples_per_input_channel * num_channels,
+ AudioFrame::kMaxDataSizeSamples);
+ for (int i = 0; i < samples_per_input_channel * num_channels; ++i) {
+ fixed_frame->mutable_data()[i] = fuzz_data->ReadOrDefaultValue<int16_t>(0);
+ }
+}
+} // namespace
+
+void FuzzAudioProcessing(test::FuzzDataHelper* fuzz_data,
+ rtc::scoped_refptr<AudioProcessing> apm) {
+ AudioFrame fixed_frame;
+ // Normal usage is up to 8 channels. Allowing to fuzz one beyond this allows
+ // us to catch implicit assumptions about normal usage.
+ constexpr int kMaxNumChannels = 9;
+ std::array<std::array<float, 480>, kMaxNumChannels> float_frames;
+ std::array<float*, kMaxNumChannels> float_frame_ptrs;
+ for (int i = 0; i < kMaxNumChannels; ++i) {
+ float_frame_ptrs[i] = float_frames[i].data();
+ }
+ float* const* ptr_to_float_frames = &float_frame_ptrs[0];
+
+ constexpr int kSampleRatesHz[] = {8000, 11025, 16000, 22050,
+ 32000, 44100, 48000};
+
+ // We may run out of fuzz data in the middle of a loop iteration. In
+ // that case, default values will be used for the rest of that
+ // iteration.
+ while (fuzz_data->CanReadBytes(1)) {
+ const bool is_float = fuzz_data->ReadOrDefaultValue(true);
+ // Decide input/output rate for this iteration.
+ const int input_rate = fuzz_data->SelectOneOf(kSampleRatesHz);
+ const int output_rate = fuzz_data->SelectOneOf(kSampleRatesHz);
+
+ const uint8_t stream_delay = fuzz_data->ReadOrDefaultValue<uint8_t>(0);
+ // API call needed for AECM to run.
+ apm->set_stream_delay_ms(stream_delay);
+
+ const bool key_pressed = fuzz_data->ReadOrDefaultValue(true);
+ apm->set_stream_key_pressed(key_pressed);
+
+ // Make the APM call depending on capture/render mode and float /
+ // fix interface.
+ const bool is_capture = fuzz_data->ReadOrDefaultValue(true);
+
+ // Fill the arrays with audio samples from the data.
+ int apm_return_code = AudioProcessing::Error::kNoError;
+ if (is_float) {
+ const int num_channels =
+ fuzz_data->ReadOrDefaultValue<uint8_t>(1) % kMaxNumChannels;
+
+ GenerateFloatFrame(fuzz_data, input_rate, num_channels,
+ ptr_to_float_frames);
+ if (is_capture) {
+ apm_return_code = apm->ProcessStream(
+ ptr_to_float_frames, StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), ptr_to_float_frames);
+ } else {
+ apm_return_code = apm->ProcessReverseStream(
+ ptr_to_float_frames, StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), ptr_to_float_frames);
+ }
+ } else {
+ const int num_channels = fuzz_data->ReadOrDefaultValue(true) ? 2 : 1;
+ GenerateFixedFrame(fuzz_data, input_rate, num_channels, &fixed_frame);
+
+ if (is_capture) {
+ apm_return_code = ProcessAudioFrame(apm.get(), &fixed_frame);
+ } else {
+ apm_return_code = ProcessReverseAudioFrame(apm.get(), &fixed_frame);
+ }
+ }
+
+ // Cover stats gathering code paths.
+ static_cast<void>(apm->GetStatistics(true /*has_remote_tracks*/));
+
+ RTC_DCHECK_NE(apm_return_code, AudioProcessing::kBadDataLengthError);
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.h b/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.h
new file mode 100644
index 0000000000..a604db8cef
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_processing_fuzzer_helper.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUZZERS_AUDIO_PROCESSING_FUZZER_HELPER_H_
+#define TEST_FUZZERS_AUDIO_PROCESSING_FUZZER_HELPER_H_
+
+#include <memory>
+
+#include "modules/audio_processing/include/audio_processing.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+namespace webrtc {
+
+void FuzzAudioProcessing(test::FuzzDataHelper* fuzz_data,
+ rtc::scoped_refptr<AudioProcessing> apm);
+
+} // namespace webrtc
+
+#endif // TEST_FUZZERS_AUDIO_PROCESSING_FUZZER_HELPER_H_
diff --git a/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc
new file mode 100644
index 0000000000..ca3946988c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/audio_processing_sample_rate_fuzzer.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <array>
+#include <cmath>
+#include <limits>
+
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/audio_processing/test/audio_processing_builder_for_testing.h"
+#include "rtc_base/checks.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace {
+constexpr int kMaxNumChannels = 2;
+// APM supported max rate is 384000 Hz, using a limit slightly above lets the
+// fuzzer exercise the handling of too high rates.
+constexpr int kMaxSampleRateHz = 400000;
+constexpr int kMaxSamplesPerChannel = kMaxSampleRateHz / 100;
+
+void GenerateFloatFrame(test::FuzzDataHelper& fuzz_data,
+ int input_rate,
+ int num_channels,
+ float* const* float_frames) {
+ const int samples_per_input_channel =
+ AudioProcessing::GetFrameSize(input_rate);
+ RTC_DCHECK_LE(samples_per_input_channel, kMaxSamplesPerChannel);
+ for (int i = 0; i < num_channels; ++i) {
+ float channel_value;
+ fuzz_data.CopyTo<float>(&channel_value);
+ std::fill(float_frames[i], float_frames[i] + samples_per_input_channel,
+ channel_value);
+ }
+}
+
+void GenerateFixedFrame(test::FuzzDataHelper& fuzz_data,
+ int input_rate,
+ int num_channels,
+ int16_t* fixed_frames) {
+ const int samples_per_input_channel =
+ AudioProcessing::GetFrameSize(input_rate);
+ RTC_DCHECK_LE(samples_per_input_channel, kMaxSamplesPerChannel);
+ // Write interleaved samples.
+ for (int ch = 0; ch < num_channels; ++ch) {
+ const int16_t channel_value = fuzz_data.ReadOrDefaultValue<int16_t>(0);
+ for (int i = ch; i < samples_per_input_channel * num_channels;
+ i += num_channels) {
+ fixed_frames[i] = channel_value;
+ }
+ }
+}
+
+// No-op processor used to influence APM input/output pipeline decisions based
+// on what submodules are present.
+class NoopCustomProcessing : public CustomProcessing {
+ public:
+ NoopCustomProcessing() {}
+ ~NoopCustomProcessing() override {}
+ void Initialize(int sample_rate_hz, int num_channels) override {}
+ void Process(AudioBuffer* audio) override {}
+ std::string ToString() const override { return ""; }
+ void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting) override {}
+};
+} // namespace
+
+// This fuzzer is directed at fuzzing unexpected input and output sample rates
+// of APM. For example, the sample rate 22050 Hz is processed by APM in frames
+// of floor(22050/100) = 220 samples. This is not exactly 10 ms of audio
+// content, and may break assumptions commonly made on the APM frame size.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 100) {
+ return;
+ }
+ test::FuzzDataHelper fuzz_data(rtc::ArrayView<const uint8_t>(data, size));
+
+ std::unique_ptr<CustomProcessing> capture_processor =
+ fuzz_data.ReadOrDefaultValue(true)
+ ? std::make_unique<NoopCustomProcessing>()
+ : nullptr;
+ std::unique_ptr<CustomProcessing> render_processor =
+ fuzz_data.ReadOrDefaultValue(true)
+ ? std::make_unique<NoopCustomProcessing>()
+ : nullptr;
+ rtc::scoped_refptr<AudioProcessing> apm =
+ AudioProcessingBuilderForTesting()
+ .SetConfig({.pipeline = {.multi_channel_render = true,
+ .multi_channel_capture = true}})
+ .SetCapturePostProcessing(std::move(capture_processor))
+ .SetRenderPreProcessing(std::move(render_processor))
+ .Create();
+ RTC_DCHECK(apm);
+
+ std::array<int16_t, kMaxSamplesPerChannel * kMaxNumChannels> fixed_frame;
+ std::array<std::array<float, kMaxSamplesPerChannel>, kMaxNumChannels>
+ float_frames;
+ std::array<float*, kMaxNumChannels> float_frame_ptrs;
+ for (int i = 0; i < kMaxNumChannels; ++i) {
+ float_frame_ptrs[i] = float_frames[i].data();
+ }
+ float* const* ptr_to_float_frames = &float_frame_ptrs[0];
+
+ // Choose whether to fuzz the float or int16_t interfaces of APM.
+ const bool is_float = fuzz_data.ReadOrDefaultValue(true);
+
+ // We may run out of fuzz data in the middle of a loop iteration. In
+ // that case, default values will be used for the rest of that
+ // iteration.
+ while (fuzz_data.CanReadBytes(1)) {
+ // Decide input/output rate for this iteration.
+ const int input_rate = static_cast<int>(
+ fuzz_data.ReadOrDefaultValue<size_t>(8000) % kMaxSampleRateHz);
+ const int output_rate = static_cast<int>(
+ fuzz_data.ReadOrDefaultValue<size_t>(8000) % kMaxSampleRateHz);
+ const int num_channels = fuzz_data.ReadOrDefaultValue(true) ? 2 : 1;
+
+ // Since render and capture calls have slightly different reinitialization
+ // procedures, we let the fuzzer choose the order.
+ const bool is_capture = fuzz_data.ReadOrDefaultValue(true);
+
+ int apm_return_code = AudioProcessing::Error::kNoError;
+ if (is_float) {
+ GenerateFloatFrame(fuzz_data, input_rate, num_channels,
+ ptr_to_float_frames);
+
+ if (is_capture) {
+ apm_return_code = apm->ProcessStream(
+ ptr_to_float_frames, StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), ptr_to_float_frames);
+ } else {
+ apm_return_code = apm->ProcessReverseStream(
+ ptr_to_float_frames, StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), ptr_to_float_frames);
+ }
+ } else {
+ GenerateFixedFrame(fuzz_data, input_rate, num_channels,
+ fixed_frame.data());
+
+ if (is_capture) {
+ apm_return_code = apm->ProcessStream(
+ fixed_frame.data(), StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), fixed_frame.data());
+ } else {
+ apm_return_code = apm->ProcessReverseStream(
+ fixed_frame.data(), StreamConfig(input_rate, num_channels),
+ StreamConfig(output_rate, num_channels), fixed_frame.data());
+ }
+ }
+ // APM may flag an error on unsupported audio formats, but should not crash.
+ RTC_DCHECK(apm_return_code == AudioProcessing::kNoError ||
+ apm_return_code == AudioProcessing::kBadSampleRateError);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/comfort_noise_decoder_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/comfort_noise_decoder_fuzzer.cc
new file mode 100644
index 0000000000..7f44af99fb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/comfort_noise_decoder_fuzzer.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "api/array_view.h"
+#include "modules/audio_coding/codecs/cng/webrtc_cng.h"
+#include "rtc_base/buffer.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+void FuzzOneInputTest(rtc::ArrayView<const uint8_t> data) {
+ FuzzDataHelper fuzz_data(data);
+ ComfortNoiseDecoder cng_decoder;
+
+ while (1) {
+ if (!fuzz_data.CanReadBytes(1))
+ break;
+ const uint8_t sid_frame_len = fuzz_data.Read<uint8_t>();
+ auto sid_frame = fuzz_data.ReadByteArray(sid_frame_len);
+ if (sid_frame.empty())
+ break;
+ cng_decoder.UpdateSid(sid_frame);
+ if (!fuzz_data.CanReadBytes(3))
+ break;
+ constexpr bool kTrueOrFalse[] = {true, false};
+ const bool new_period = fuzz_data.SelectOneOf(kTrueOrFalse);
+ constexpr size_t kOutputSizes[] = {80, 160, 320, 480};
+ const size_t output_size = fuzz_data.SelectOneOf(kOutputSizes);
+ const size_t num_generate_calls =
+ std::min(fuzz_data.Read<uint8_t>(), static_cast<uint8_t>(17));
+ rtc::BufferT<int16_t> output(output_size);
+ for (size_t i = 0; i < num_generate_calls; ++i) {
+ cng_decoder.Generate(output, new_period);
+ }
+ }
+}
+
+} // namespace
+} // namespace test
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 5000) {
+ return;
+ }
+ test::FuzzOneInputTest(rtc::ArrayView<const uint8_t>(data, size));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_fec_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_fec_config.json
new file mode 100644
index 0000000000..59b5db9446
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_fec_config.json
@@ -0,0 +1,151 @@
+[
+ {
+ "decoders" : [
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "1"
+ },
+ {
+ "profile-level-id" : "42001f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 100
+ },
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "0"
+ },
+ {
+ "profile-level-id" : "42001f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 102
+ },
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "1"
+ },
+ {
+ "profile-level-id" : "42e01f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 127
+ },
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "0"
+ },
+ {
+ "profile-level-id" : "42e01f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 125
+ },
+ {
+ "codec_params" : [],
+ "payload_name" : "VP8",
+ "payload_type" : 96
+ },
+ {
+ "codec_params" : [],
+ "payload_name" : "VP9",
+ "payload_type" : 98
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [
+ {
+ "encrypt" : false,
+ "id" : 5,
+ "uri" : "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+ },
+ {
+ "encrypt" : false,
+ "id" : 3,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"
+ },
+ {
+ "encrypt" : false,
+ "id" : 6,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"
+ },
+ {
+ "encrypt" : false,
+ "id" : 7,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"
+ },
+ {
+ "encrypt" : false,
+ "id" : 8,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"
+ },
+ {
+ "encrypt" : false,
+ "id" : 4,
+ "uri" : "urn:3gpp:video-orientation"
+ },
+ {
+ "encrypt" : false,
+ "id" : 2,
+ "uri" : "urn:ietf:params:rtp-hdrext:toffset"
+ }
+ ],
+ "local_ssrc" : 1,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : -1,
+ "remb" : true,
+ "remote_ssrc" : 2736493666,
+ "rtcp_mode" : "RtcpMode::kReducedSize",
+ "rtx_payload_types" : [
+ {
+ "97" : 96
+ },
+ {
+ "99" : 98
+ },
+ {
+ "101" : 100
+ },
+ {
+ "107" : 125
+ },
+ {
+ "122" : 127
+ },
+ {
+ "123" : 102
+ }
+ ],
+ "rtx_ssrc" : 885796452,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+ }
+]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_non_interleaved_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_non_interleaved_config.json
new file mode 100644
index 0000000000..9cb5bd767b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_non_interleaved_config.json
@@ -0,0 +1,66 @@
+[
+ {
+ "decoders" : [
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "0"
+ },
+ {
+ "packetization-mode" : "0"
+ },
+ {
+ "profile-level-id" : "42001f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 100
+ },
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "1"
+ },
+ {
+ "profile-level-id" : "42e01f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 102
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [],
+ "local_ssrc" : 1,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : 125,
+ "remb" : true,
+ "remote_ssrc" : 1989790381,
+ "rtcp_mode" : "RtcpMode::kReducedSize",
+ "rtx_payload_types" : [
+ {
+ "101" : 100
+ },
+ {
+ "122" : 125
+ },
+ {
+ "123" : 127
+ }
+ ],
+ "rtx_ssrc" : 1406083315,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+ }
+]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_single_nal_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_single_nal_config.json
new file mode 100644
index 0000000000..f3ae6bbafc
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/h264_single_nal_config.json
@@ -0,0 +1,66 @@
+[
+ {
+ "decoders" : [
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "1"
+ },
+ {
+ "profile-level-id" : "42001f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 100
+ },
+ {
+ "codec_params" : [
+ {
+ "level-asymmetry-allowed" : "1"
+ },
+ {
+ "packetization-mode" : "1"
+ },
+ {
+ "profile-level-id" : "42e01f"
+ }
+ ],
+ "payload_name" : "H264",
+ "payload_type" : 102
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [],
+ "local_ssrc" : 1,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : 125,
+ "remb" : true,
+ "remote_ssrc" : 1989790381,
+ "rtcp_mode" : "RtcpMode::kReducedSize",
+ "rtx_payload_types" : [
+ {
+ "101" : 100
+ },
+ {
+ "122" : 125
+ },
+ {
+ "123" : 127
+ }
+ ],
+ "rtx_ssrc" : 1406083315,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+ }
+]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_config.json
new file mode 100644
index 0000000000..0a5eef8b09
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_config.json
@@ -0,0 +1,29 @@
+[{
+ "decoders" : [
+ {
+ "codec_params" : [],
+ "payload_name" : "VP8",
+ "payload_type" : 125
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [],
+ "local_ssrc" : 7331,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : -1,
+ "remb" : true,
+ "remote_ssrc" : 1337,
+ "rtcp_mode" : "RtcpMode::kCompound",
+ "rtx_payload_types" : [],
+ "rtx_ssrc" : 100,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+}]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_fec_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_fec_config.json
new file mode 100644
index 0000000000..3d2b66301d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp8_fec_config.json
@@ -0,0 +1,73 @@
+[
+ {
+ "decoders" : [
+ {
+ "codec_params" : [],
+ "payload_name" : "VP8",
+ "payload_type" : 96
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [
+ {
+ "encrypt" : false,
+ "id" : 5,
+ "uri" : "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+ },
+ {
+ "encrypt" : false,
+ "id" : 3,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"
+ },
+ {
+ "encrypt" : false,
+ "id" : 6,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"
+ },
+ {
+ "encrypt" : false,
+ "id" : 7,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"
+ },
+ {
+ "encrypt" : false,
+ "id" : 8,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"
+ },
+ {
+ "encrypt" : false,
+ "id" : 4,
+ "uri" : "urn:3gpp:video-orientation"
+ },
+ {
+ "encrypt" : false,
+ "id" : 2,
+ "uri" : "urn:ietf:params:rtp-hdrext:toffset"
+ }
+ ],
+ "local_ssrc" : 1,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : -1,
+ "remb" : true,
+ "remote_ssrc" : 2672243158,
+ "rtcp_mode" : "RtcpMode::kReducedSize",
+ "rtx_payload_types" : [
+ {
+ "97" : 96
+ },
+ {
+ "99" : 98
+ }
+ ],
+ "rtx_ssrc" : 1807563126,
+ "transport_cc" : true
+ },
+ "target_delay_ms" : 0
+ }
+]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_config.json
new file mode 100644
index 0000000000..a9c88a2402
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_config.json
@@ -0,0 +1,29 @@
+[{
+ "decoders" : [
+ {
+ "codec_params" : [],
+ "payload_name" : "VP9",
+ "payload_type" : 124
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [],
+ "local_ssrc" : 7331,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : -1,
+ "remb" : true,
+ "remote_ssrc" : 1337,
+ "rtcp_mode" : "RtcpMode::kCompound",
+ "rtx_payload_types" : [],
+ "rtx_ssrc" : 100,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+}]
diff --git a/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_fec_config.json b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_fec_config.json
new file mode 100644
index 0000000000..d089aa8fec
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/configs/replay_packet_fuzzer/vp9_fec_config.json
@@ -0,0 +1,79 @@
+[
+ {
+ "decoders" : [
+ {
+ "codec_params" : [],
+ "payload_name" : "VP9",
+ "payload_type" : 98
+ },
+ {
+ "codec_params" : [],
+ "payload_name" : "VP8",
+ "payload_type" : 96
+ }
+ ],
+ "render_delay_ms" : 10,
+ "rtp" : {
+ "extensions" : [
+ {
+ "encrypt" : false,
+ "id" : 5,
+ "uri" : "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+ },
+ {
+ "encrypt" : false,
+ "id" : 3,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"
+ },
+ {
+ "encrypt" : false,
+ "id" : 6,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"
+ },
+ {
+ "encrypt" : false,
+ "id" : 7,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"
+ },
+ {
+ "encrypt" : false,
+ "id" : 8,
+ "uri" : "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"
+ },
+ {
+ "encrypt" : false,
+ "id" : 4,
+ "uri" : "urn:3gpp:video-orientation"
+ },
+ {
+ "encrypt" : false,
+ "id" : 2,
+ "uri" : "urn:ietf:params:rtp-hdrext:toffset"
+ }
+ ],
+ "local_ssrc" : 1,
+ "lntf" : {
+ "enabled": false,
+ },
+ "nack" : {
+ "rtp_history_ms" : 1000
+ },
+ "red_payload_type" : -1,
+ "remb" : true,
+ "remote_ssrc" : 2678204013,
+ "rtcp_mode" : "RtcpMode::kReducedSize",
+ "rtx_payload_types" : [
+ {
+ "97" : 96
+ },
+ {
+ "99" : 98
+ }
+ ],
+ "rtx_ssrc" : 1110725867,
+ "transport_cc" : true,
+ "ulpfec_payload_type" : -1
+ },
+ "target_delay_ms" : 0
+ }
+]
diff --git a/third_party/libwebrtc/test/fuzzers/congestion_controller_feedback_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/congestion_controller_feedback_fuzzer.cc
new file mode 100644
index 0000000000..e266b85fa4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/congestion_controller_feedback_fuzzer.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "absl/functional/bind_front.h"
+#include "modules/congestion_controller/include/receive_side_congestion_controller.h"
+#include "modules/pacing/packet_router.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ size_t i = 0;
+ if (size < sizeof(int64_t) + sizeof(uint8_t) + sizeof(uint32_t))
+ return;
+ SimulatedClock clock(data[i++]);
+ PacketRouter packet_router;
+ ReceiveSideCongestionController cc(
+ &clock,
+ absl::bind_front(&PacketRouter::SendCombinedRtcpPacket, &packet_router),
+ absl::bind_front(&PacketRouter::SendRemb, &packet_router), nullptr);
+ RTPHeader header;
+ header.ssrc = ByteReader<uint32_t>::ReadBigEndian(&data[i]);
+ i += sizeof(uint32_t);
+ header.extension.hasTransportSequenceNumber = true;
+ int64_t arrival_time_ms = std::min<int64_t>(
+ std::max<int64_t>(ByteReader<int64_t>::ReadBigEndian(&data[i]), 0),
+ std::numeric_limits<int64_t>::max() / 2);
+ i += sizeof(int64_t);
+ const size_t kMinPacketSize =
+ sizeof(size_t) + sizeof(uint16_t) + sizeof(uint8_t);
+ while (i + kMinPacketSize < size) {
+ size_t payload_size = ByteReader<size_t>::ReadBigEndian(&data[i]) % 1500;
+ i += sizeof(size_t);
+ header.extension.transportSequenceNumber =
+ ByteReader<uint16_t>::ReadBigEndian(&data[i]);
+ i += sizeof(uint16_t);
+ cc.OnReceivedPacket(arrival_time_ms, payload_size, header);
+ clock.AdvanceTimeMilliseconds(5);
+ arrival_time_ms += ByteReader<uint8_t>::ReadBigEndian(&data[i]);
+ i += sizeof(uint8_t);
+ cc.MaybeProcess();
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/README b/third_party/libwebrtc/test/fuzzers/corpora/README
new file mode 100644
index 0000000000..cc87025ff6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/README
@@ -0,0 +1,37 @@
+This is a collection of corpora for various WebRTC fuzzers. To use
+them, the gn targets define seed_corpus=$corpus_dir, which causes the
+ClusterFuzz upload bot to bundle $corpus_dir and upload it.
+
+The format is simple: one file per test case. Specific notes are
+included below.
+
+### SDP ###
+This corpus was initially assembled manually from the following
+sources:
+
+ - curl --silent https://www.ietf.org/rfc/rfc4317.txt | grep '^[ a-z]*=[^=]*$' | sed 's/^[[:space:]]*//' | awk -v RS='(^|\n)v=' '/./ {print "v="$0 > NR".sdp"}'
+ - all the SDPs used in the parser unit tests
+ - some manually gathered SDPs from Firefox and Opera
+
+The SDP tokens come from:
+
+ - grep "^static const " webrtc/api/webrtcsdp.cc | cut -d'=' -f2 | cut -d ';' -f1 | tr -d '"' | tr -d "'" | tr -d ' ' | sort -u | grep -v '^(\n|\r|\r\n)$|^$' | sed -e 's/^/"/' -e 's/$/"/' | tail -n +2
+
+### STUN ###
+This corpus was initially assembled from the STUN unit tests, together
+with a crash that it found relatively quickly.
+
+### RT(C)P ###
+This corpus was initially assembled from the unittests. RTCP was
+minimised first.
+
+There is also rt(c?)p-corpus-with-extra-byte, in which each sample is
+prefixed by the byte 0xff. Some of the rtp fuzzers need to decide
+which header extensions to enable, and the first byte of the fuzz data
+is used for this.
+
+### PseudoTCP ###
+Very small corpus minimised from the unit tests.
+
+### SCTP ###
+This corpus was extracted from a few manually recorded Wireshark dumps.
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/aec3-config-json-corpus/default.json b/third_party/libwebrtc/test/fuzzers/corpora/aec3-config-json-corpus/default.json
new file mode 100644
index 0000000000..4dcbcee53a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/aec3-config-json-corpus/default.json
@@ -0,0 +1 @@
+{"aec3": {"buffering": {"excess_render_detection_interval_blocks": 250,"max_allowed_excess_render_blocks": 8},"delay": {"default_delay": 5,"down_sampling_factor": 4,"num_filters": 5,"delay_headroom_samples": 32,"hysteresis_limit_blocks": 1,"fixed_capture_delay_samples": 0,"delay_estimate_smoothing": 0.7,"delay_candidate_detection_threshold": 0.2,"delay_selection_thresholds": {"initial": 5,"converged": 20},"use_external_delay_estimator": false,"downmix_before_delay_estimation": true,"log_warning_on_delay_changes": false},"filter": {"main": [13,5e-05,0.05,0.001,2,2.00753e+07],"shadow": [13,0.7,2.00753e+07],"main_initial": [12,0.005,0.5,0.001,2,2.00753e+07],"shadow_initial": [12,0.9,2.00753e+07],"config_change_duration_blocks": 250,"initial_state_seconds": 2.5,"conservative_initial_phase": false,"enable_shadow_filter_output_usage": true},"erle": {"min": 1,"max_l": 4,"max_h": 1.5,"onset_detection": true,"num_sections": 1,"clamp_quality_estimate_to_zero": true,"clamp_quality_estimate_to_one": true},"ep_strength": {"default_gain": 1,"default_len": 0.83,"echo_can_saturate": true,"bounded_erl": false},"echo_audibility": {"low_render_limit": 256,"normal_render_limit": 64,"floor_power": 128,"audibility_threshold_lf": 10,"audibility_threshold_mf": 10,"audibility_threshold_hf": 10,"use_stationarity_properties": false,"use_stationarity_properties_at_init": false},"render_levels": {"active_render_limit": 100,"poor_excitation_render_limit": 150,"poor_excitation_render_limit_ds8": 20},"echo_removal_control": {"has_clock_drift": false,"linear_and_stable_echo_path": false},"echo_model": {"noise_floor_hold": 50,"min_noise_floor_power": 1.6384e+06,"stationary_gate_slope": 10,"noise_gate_power": 27509.4,"noise_gate_slope": 0.3,"render_pre_window_size": 1,"render_post_window_size": 1},"suppressor": {"nearend_average_blocks": 4,"normal_tuning": {"mask_lf": [0.3,0.4,0.3],"mask_hf": [0.07,0.1,0.3],"max_inc_factor": 2,"max_dec_factor_lf": 0.25},"nearend_tuning": {"mask_lf": 
[1.09,1.1,0.3],"mask_hf": [0.1,0.3,0.3],"max_inc_factor": 2,"max_dec_factor_lf": 0.25},"dominant_nearend_detection": {"enr_threshold": 0.25,"enr_exit_threshold": 10,"snr_threshold": 30,"hold_duration": 50,"trigger_threshold": 12,"use_during_initial_phase": 1},"high_bands_suppression": {"enr_threshold": 1,"max_gain_during_echo": 1},"floor_first_increase": 1e-05,"enforce_transparent": false,"enforce_empty_higher_bands": false}}}
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-1 b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-1
new file mode 100644
index 0000000000..cda107c534
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-1
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-2 b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-2
new file mode 100644
index 0000000000..bf1a98dccd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-2
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-3 b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-3
new file mode 100644
index 0000000000..f28283502a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-3
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-4 b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-4
new file mode 100644
index 0000000000..7b5d1cd790
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/agc-corpus/agc-4
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-0 b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-0
new file mode 100644
index 0000000000..da32f0c29c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-0
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-1 b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-1
new file mode 100644
index 0000000000..097af39c3f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-1
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-2 b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-2
new file mode 100644
index 0000000000..a9f4011d3f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-2
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-3 b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-3
new file mode 100644
index 0000000000..b4ba1578cd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/audio_processing-corpus/audio-processing-3
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-0 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-0
new file mode 100644
index 0000000000..fc56e0e591
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-0
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-1 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-1
new file mode 100644
index 0000000000..b5c147f9f3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-1
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-10 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-10
new file mode 100644
index 0000000000..ced48741aa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-10
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-11 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-11
new file mode 100644
index 0000000000..53afe97468
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-11
@@ -0,0 +1 @@
+ÿÿÿÿÿ°Ë±±o \ No newline at end of file
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-12 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-12
new file mode 100644
index 0000000000..4d126eacc3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-12
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-13 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-13
new file mode 100644
index 0000000000..847e67b79a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-13
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-14 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-14
new file mode 100644
index 0000000000..f5efdd1317
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-14
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-15 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-15
new file mode 100644
index 0000000000..830dc8a679
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-15
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-16 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-16
new file mode 100644
index 0000000000..06017917dc
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-16
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-2 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-2
new file mode 100644
index 0000000000..43cf72619c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-2
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-3 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-3
new file mode 100644
index 0000000000..a2e8e0c9ae
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-3
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-4 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-4
new file mode 100644
index 0000000000..ff5785f0c3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-4
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-5 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-5
new file mode 100644
index 0000000000..f1196c98aa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-5
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-6 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-6
new file mode 100644
index 0000000000..88ce0a1600
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-6
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-7 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-7
new file mode 100644
index 0000000000..9adb5c2125
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-7
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-8 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-8
new file mode 100644
index 0000000000..5a8f929a61
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-8
@@ -0,0 +1,2 @@
+ÿÿÿÞ
+‚ÿ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-9 b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-9
new file mode 100644
index 0000000000..bde1b127b3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/dependency_descriptor-corpus/dependency-descriptor-9
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-0 b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-0
new file mode 100644
index 0000000000..73e1d22f10
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-0
@@ -0,0 +1 @@
+WebRTC-DecoderDataDumpDirectory/Enabled/
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-1 b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-1
new file mode 100644
index 0000000000..997888a6fd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-1
@@ -0,0 +1 @@
+WebRTC-DecoderDataDumpDirectory/Disabled/
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-2 b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-2
new file mode 100644
index 0000000000..7e897a418d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/field_trial-corpus/field-trial-2
@@ -0,0 +1 @@
+WebRTC-DecoderDataDumpDirectory/Disabled/WebRTC-IPv6Default/Enabled/
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-0 b/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-0
new file mode 100644
index 0000000000..dbe089f278
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-0
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-1 b/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-1
new file mode 100644
index 0000000000..dcb9c476f0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/h264-depacketizer-fuzzer-corpus/h264-1
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/1.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/1.mdns
new file mode 100644
index 0000000000..6e1789a83b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/1.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/10.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/10.mdns
new file mode 100644
index 0000000000..f5fa6f8d17
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/10.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/11.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/11.mdns
new file mode 100644
index 0000000000..f72f28c13c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/11.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/12.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/12.mdns
new file mode 100644
index 0000000000..9efa76d64a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/12.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/13.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/13.mdns
new file mode 100644
index 0000000000..538a5a62dc
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/13.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/14.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/14.mdns
new file mode 100644
index 0000000000..d4ce4efea8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/14.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/15.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/15.mdns
new file mode 100644
index 0000000000..ff2810565b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/15.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/16.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/16.mdns
new file mode 100644
index 0000000000..a1a02d7f8e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/16.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/17.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/17.mdns
new file mode 100644
index 0000000000..ba30f7bc2f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/17.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/18.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/18.mdns
new file mode 100644
index 0000000000..7cbdd3e7a0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/18.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/19.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/19.mdns
new file mode 100644
index 0000000000..f70eaa3ab2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/19.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/2.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/2.mdns
new file mode 100644
index 0000000000..7d259c2ea9
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/2.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/20.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/20.mdns
new file mode 100644
index 0000000000..6681f943e4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/20.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/3.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/3.mdns
new file mode 100644
index 0000000000..3ac4fd6c98
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/3.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/4.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/4.mdns
new file mode 100644
index 0000000000..3207842f48
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/4.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/5.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/5.mdns
new file mode 100644
index 0000000000..871ea41405
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/5.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/6.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/6.mdns
new file mode 100644
index 0000000000..ee2f8eca1c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/6.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/7.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/7.mdns
new file mode 100644
index 0000000000..d37b935654
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/7.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/8.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/8.mdns
new file mode 100644
index 0000000000..dd2f976afd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/8.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/9.mdns b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/9.mdns
new file mode 100644
index 0000000000..a01e729342
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/mdns-corpus/9.mdns
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/pseudotcp-corpus/785b96587d0eb44dd5d75b7a886f37e2ac504511 b/third_party/libwebrtc/test/fuzzers/corpora/pseudotcp-corpus/785b96587d0eb44dd5d75b7a886f37e2ac504511
new file mode 100644
index 0000000000..21f5cffa12
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/pseudotcp-corpus/785b96587d0eb44dd5d75b7a886f37e2ac504511
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/0.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/0.rtcp
new file mode 100644
index 0000000000..802eecf052
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/0.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/1.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/1.rtcp
new file mode 100644
index 0000000000..c062793b3c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/1.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/10.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/10.rtcp
new file mode 100644
index 0000000000..fdc7780218
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/10.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/11.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/11.rtcp
new file mode 100644
index 0000000000..994f721dca
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/11.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/12.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/12.rtcp
new file mode 100644
index 0000000000..557fe9c23a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/12.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/13.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/13.rtcp
new file mode 100644
index 0000000000..0f1b1809a7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/13.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/14.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/14.rtcp
new file mode 100644
index 0000000000..df6bfed86c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/14.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/15.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/15.rtcp
new file mode 100644
index 0000000000..3b31b3079d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/15.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/16.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/16.rtcp
new file mode 100644
index 0000000000..0496ddf910
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/16.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/17.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/17.rtcp
new file mode 100644
index 0000000000..755b7198ba
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/17.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/18.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/18.rtcp
new file mode 100644
index 0000000000..04fd1e3417
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/18.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/19.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/19.rtcp
new file mode 100644
index 0000000000..8ae9812f10
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/19.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/2.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/2.rtcp
new file mode 100644
index 0000000000..ba38a0a599
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/2.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/20.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/20.rtcp
new file mode 100644
index 0000000000..debb7de940
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/20.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/21.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/21.rtcp
new file mode 100644
index 0000000000..3fcbd405e2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/21.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/22.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/22.rtcp
new file mode 100644
index 0000000000..3defd8c253
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/22.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/23.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/23.rtcp
new file mode 100644
index 0000000000..211ccbd5be
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/23.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/24.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/24.rtcp
new file mode 100644
index 0000000000..8ded9be7fd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/24.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/25.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/25.rtcp
new file mode 100644
index 0000000000..b5c8146cef
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/25.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/26.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/26.rtcp
new file mode 100644
index 0000000000..0fd4f25511
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/26.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/27.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/27.rtcp
new file mode 100644
index 0000000000..2c8bb63c9c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/27.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/28.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/28.rtcp
new file mode 100644
index 0000000000..6a20bc27d8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/28.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/29.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/29.rtcp
new file mode 100644
index 0000000000..76fd214197
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/29.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/3.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/3.rtcp
new file mode 100644
index 0000000000..72307e08bd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/3.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/30.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/30.rtcp
new file mode 100644
index 0000000000..cfa38faa67
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/30.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/31.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/31.rtcp
new file mode 100644
index 0000000000..8abf725057
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/31.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/32.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/32.rtcp
new file mode 100644
index 0000000000..76518c88dd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/32.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/33.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/33.rtcp
new file mode 100644
index 0000000000..92964965a6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/33.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/34.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/34.rtcp
new file mode 100644
index 0000000000..9045c158b5
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/34.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/35.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/35.rtcp
new file mode 100644
index 0000000000..f86df02448
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/35.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/36.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/36.rtcp
new file mode 100644
index 0000000000..d274d416a2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/36.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/37.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/37.rtcp
new file mode 100644
index 0000000000..b4f04f4069
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/37.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/38.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/38.rtcp
new file mode 100644
index 0000000000..8d65e0ff35
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/38.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/39.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/39.rtcp
new file mode 100644
index 0000000000..fb969f85f0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/39.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/4.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/4.rtcp
new file mode 100644
index 0000000000..5170185122
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/4.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/40.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/40.rtcp
new file mode 100644
index 0000000000..279047c6b2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/40.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/41.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/41.rtcp
new file mode 100644
index 0000000000..09a8a589bd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/41.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/42.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/42.rtcp
new file mode 100644
index 0000000000..f727b190a7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/42.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/43.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/43.rtcp
new file mode 100644
index 0000000000..9aab33a48a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/43.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/44.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/44.rtcp
new file mode 100644
index 0000000000..2eba529c3a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/44.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/45.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/45.rtcp
new file mode 100644
index 0000000000..50c0beabed
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/45.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/46.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/46.rtcp
new file mode 100644
index 0000000000..2de424a5d1
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/46.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/47.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/47.rtcp
new file mode 100644
index 0000000000..71d530b41f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/47.rtcp
@@ -0,0 +1 @@
+€ \ No newline at end of file
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/48.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/48.rtcp
new file mode 100644
index 0000000000..665695029a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/48.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/49.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/49.rtcp
new file mode 100644
index 0000000000..33b0f3d873
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/49.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/5.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/5.rtcp
new file mode 100644
index 0000000000..46bef3242a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/5.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/50.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/50.rtcp
new file mode 100644
index 0000000000..5bec125397
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/50.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/51.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/51.rtcp
new file mode 100644
index 0000000000..ff94fb84b7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/51.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/52.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/52.rtcp
new file mode 100644
index 0000000000..55cc76cd81
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/52.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/53.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/53.rtcp
new file mode 100644
index 0000000000..ac11523c94
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/53.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/54.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/54.rtcp
new file mode 100644
index 0000000000..f3f0bb8fa1
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/54.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/55.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/55.rtcp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/55.rtcp
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/56.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/56.rtcp
new file mode 100644
index 0000000000..f0012f1efa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/56.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/57.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/57.rtcp
new file mode 100644
index 0000000000..61517f01d8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/57.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/58.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/58.rtcp
new file mode 100644
index 0000000000..3688310a05
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/58.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/59.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/59.rtcp
new file mode 100644
index 0000000000..e914f83b16
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/59.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/6.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/6.rtcp
new file mode 100644
index 0000000000..dcad8f6b40
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/6.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/60.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/60.rtcp
new file mode 100644
index 0000000000..8e999e7832
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/60.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/61.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/61.rtcp
new file mode 100644
index 0000000000..eb38d237aa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/61.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/62.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/62.rtcp
new file mode 100644
index 0000000000..6df94b715f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/62.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/63.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/63.rtcp
new file mode 100644
index 0000000000..a3b2acb3a8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/63.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/64.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/64.rtcp
new file mode 100644
index 0000000000..4d50c0f4ae
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/64.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/65.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/65.rtcp
new file mode 100644
index 0000000000..537d10c6ce
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/65.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/66.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/66.rtcp
new file mode 100644
index 0000000000..f280f3ca09
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/66.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/7.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/7.rtcp
new file mode 100644
index 0000000000..f8f74e12c6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/7.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/8.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/8.rtcp
new file mode 100644
index 0000000000..f0ceba83f4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/8.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/9.rtcp b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/9.rtcp
new file mode 100644
index 0000000000..7cc07893c3
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtcp-corpus/9.rtcp
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-0 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-0
new file mode 100644
index 0000000000..c93ce82ec0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-0
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-1 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-1
new file mode 100644
index 0000000000..84834aefea
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-1
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-2 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-2
new file mode 100644
index 0000000000..3a5aedba9b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-2
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-3 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-3
new file mode 100644
index 0000000000..300309ff37
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-3
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-4 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-4
new file mode 100644
index 0000000000..84834aefea
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-4
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-5 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-5
new file mode 100644
index 0000000000..7dd6632007
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-5
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-6 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-6
new file mode 100644
index 0000000000..3a1145cd0f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-6
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-7 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-7
new file mode 100644
index 0000000000..6c4a9feb52
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-7
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-8 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-8
new file mode 100644
index 0000000000..4b069389b1
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-corpus/rtp-8
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtp-depacketizer-av1-assemble-frame-corpus/av1-assemble-frame-0 b/third_party/libwebrtc/test/fuzzers/corpora/rtp-depacketizer-av1-assemble-frame-corpus/av1-assemble-frame-0
new file mode 100644
index 0000000000..540a770e29
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtp-depacketizer-av1-assemble-frame-corpus/av1-assemble-frame-0
@@ -0,0 +1 @@
+ô0ÿÿÿ¸¸¸ \ No newline at end of file
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp8/vp8.rtpdump b/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp8/vp8.rtpdump
new file mode 100644
index 0000000000..328559820d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp8/vp8.rtpdump
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp9/vp9.rtpdump b/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp9/vp9.rtpdump
new file mode 100644
index 0000000000..4dc780cd16
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/rtpdump-corpus/vp9/vp9.rtpdump
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-ack-sack.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-ack-sack.bin
new file mode 100644
index 0000000000..4374f5aad5
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-ack-sack.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data-data.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data-data.bin
new file mode 100644
index 0000000000..1f1d0be301
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data-data.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data.bin
new file mode 100644
index 0000000000..21a0c22837
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data-data.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data.bin
new file mode 100644
index 0000000000..fc8600106e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/cookie-echo-data.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/data-fragment1.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/data-fragment1.bin
new file mode 100644
index 0000000000..bec7b289e7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/data-fragment1.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/forward-tsn.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/forward-tsn.bin
new file mode 100644
index 0000000000..ab98a0a4a7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/forward-tsn.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat-ack.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat-ack.bin
new file mode 100644
index 0000000000..59200abe5e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat-ack.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat.bin
new file mode 100644
index 0000000000..cef8cfe929
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/heartbeat.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init-ack.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init-ack.bin
new file mode 100644
index 0000000000..80438434d0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init-ack.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init.bin
new file mode 100644
index 0000000000..3fb4977d58
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/init.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/re-config.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/re-config.bin
new file mode 100644
index 0000000000..74c74f3377
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/re-config.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-data.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-data.bin
new file mode 100644
index 0000000000..fe4de63863
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-data.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-gap-ack-1.bin b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-gap-ack-1.bin
new file mode 100644
index 0000000000..08494c1515
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sctp-packet-corpus/sack-gap-ack-1.bin
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/10.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/10.sdp
new file mode 100644
index 0000000000..36319c4476
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/10.sdp
@@ -0,0 +1,11 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:97 iLBC/8000
+m=audio 49172 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=sendonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/11.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/11.sdp
new file mode 100644
index 0000000000..c3dbce401f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/11.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=audio 49174 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=recvonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/12.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/12.sdp
new file mode 100644
index 0000000000..26d4ff6266
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/12.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/13.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/13.sdp
new file mode 100644
index 0000000000..f625e3890c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/13.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49174 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 49170 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/14.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/14.sdp
new file mode 100644
index 0000000000..c36b9fe6cb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/14.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 newhost.biloxi.example.com
+t=0 0
+m=audio 49178 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 49188 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/15.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/15.sdp
new file mode 100644
index 0000000000..26d4ff6266
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/15.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/16.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/16.sdp
new file mode 100644
index 0000000000..1d187a0dc5
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/16.sdp
@@ -0,0 +1,10 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=audio 51372 RTP/AVP 97 101
+a=rtpmap:97 iLBC/8000
+a=rtpmap:101 telephone-event/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/17.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/17.sdp
new file mode 100644
index 0000000000..a328d16a16
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/17.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 0 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=audio 49170 RTP/AVP 97 101
+a=rtpmap:97 iLBC/8000
+a=rtpmap:101 telephone-event/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/18.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/18.sdp
new file mode 100644
index 0000000000..1e0d3f5705
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/18.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 99
+a=rtpmap:99 iLBC/8000
+m=video 51372 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/19.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/19.sdp
new file mode 100644
index 0000000000..a9a5e12138
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/19.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 99
+a=rtpmap:99 iLBC/8000
+m=video 51374 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/2.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/2.sdp
new file mode 100644
index 0000000000..96c4975ec0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/2.sdp
@@ -0,0 +1,12 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 8 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/20.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/20.sdp
new file mode 100644
index 0000000000..b2c072af1d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/20.sdp
@@ -0,0 +1,10 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 99
+a=rtpmap:99 iLBC/8000
+m=video 51372 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/21.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/21.sdp
new file mode 100644
index 0000000000..28e949133e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/21.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 99
+a=rtpmap:99 iLBC/8000
+m=video 51374 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/22.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/22.sdp
new file mode 100644
index 0000000000..96c4975ec0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/22.sdp
@@ -0,0 +1,12 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 8 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/23.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/23.sdp
new file mode 100644
index 0000000000..36c7d70148
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/23.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49174 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 49172 RTP/AVP 32
+c=IN IP4 otherhost.biloxi.example.com
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/24.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/24.sdp
new file mode 100644
index 0000000000..6cf5dc5894
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/24.sdp
@@ -0,0 +1,8 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/25.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/25.sdp
new file mode 100644
index 0000000000..ac1ec85b29
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/25.sdp
@@ -0,0 +1,8 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 placeholder.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+a=sendonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/26.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/26.sdp
new file mode 100644
index 0000000000..f0eb0d62a7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/26.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/27.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/27.sdp
new file mode 100644
index 0000000000..1e4f3b3e19
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/27.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49178 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/28.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/28.sdp
new file mode 100644
index 0000000000..36319c4476
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/28.sdp
@@ -0,0 +1,11 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:97 iLBC/8000
+m=audio 49172 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=sendonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/29.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/29.sdp
new file mode 100644
index 0000000000..c3dbce401f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/29.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=audio 49174 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=recvonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/3.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/3.sdp
new file mode 100644
index 0000000000..b0142f5dcf
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/3.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49174 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 49170 RTP/AVP 32
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/30.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/30.sdp
new file mode 100644
index 0000000000..0798631d2e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/30.sdp
@@ -0,0 +1,11 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+a=sendonly
+m=audio 49174 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=recvonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/31.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/31.sdp
new file mode 100644
index 0000000000..78efdd8fcb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/31.sdp
@@ -0,0 +1,11 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:97 iLBC/8000
+m=audio 49172 RTP/AVP 98
+a=rtpmap:98 telephone-event/8000
+a=sendonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/32.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/32.sdp
new file mode 100644
index 0000000000..6cf5dc5894
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/32.sdp
@@ -0,0 +1,8 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/33.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/33.sdp
new file mode 100644
index 0000000000..23f462f5a6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/33.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/34.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/34.sdp
new file mode 100644
index 0000000000..b52b947486
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/34.sdp
@@ -0,0 +1,11 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=audio 48282 RTP/AVP 98
+c=IN IP4 mediaserver.biloxi.example.com
+a=rtpmap:98 telephone-event/8000
+a=recvonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/35.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/35.sdp
new file mode 100644
index 0000000000..07ad4f8758
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/35.sdp
@@ -0,0 +1,11 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=audio 49172 RTP/AVP 98
+c=IN IP4 host.atlanta.example.com
+a=rtpmap:98 telephone-event/8000
+a=sendonly
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/36.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/36.sdp
new file mode 100644
index 0000000000..c75c6977bb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/36.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/37.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/37.sdp
new file mode 100644
index 0000000000..d05dbd61fd
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/37.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/38.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/38.sdp
new file mode 100644
index 0000000000..253ac0705a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/38.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 49172 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/39.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/39.sdp
new file mode 100644
index 0000000000..57d3203c74
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/39.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 49168 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/4.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/4.sdp
new file mode 100644
index 0000000000..96c4975ec0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/4.sdp
@@ -0,0 +1,12 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 8 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/40.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/40.sdp
new file mode 100644
index 0000000000..26d4ff6266
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/40.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/41.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/41.sdp
new file mode 100644
index 0000000000..f625e3890c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/41.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49174 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 49170 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/42.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/42.sdp
new file mode 100644
index 0000000000..2dda9bb252
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/42.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49174 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 0 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/43.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/43.sdp
new file mode 100644
index 0000000000..83a2bf9e66
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/43.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+m=video 0 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/44.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/44.sdp
new file mode 100644
index 0000000000..e187d7df7d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/44.sdp
@@ -0,0 +1,5 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/45.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/45.sdp
new file mode 100644
index 0000000000..9e344f035a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/45.sdp
@@ -0,0 +1,5 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/46.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/46.sdp
new file mode 100644
index 0000000000..8a63ca0992
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/46.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/47.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/47.sdp
new file mode 100644
index 0000000000..66dadbb93f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/47.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/48.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/48.sdp
new file mode 100644
index 0000000000..6df76ab942
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/48.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 0.0.0.0
+t=0 0
+m=audio 23442 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/49.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/49.sdp
new file mode 100644
index 0000000000..23f462f5a6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/49.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/5.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/5.sdp
new file mode 100644
index 0000000000..264ab8024f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/5.sdp
@@ -0,0 +1,10 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 0 8
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+m=video 0 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/50.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/50.sdp
new file mode 100644
index 0000000000..8a63ca0992
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/50.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/51.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/51.sdp
new file mode 100644
index 0000000000..23f462f5a6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/51.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/52.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/52.sdp
new file mode 100644
index 0000000000..7c1730eb1e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/52.sdp
@@ -0,0 +1,7 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/53.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/53.sdp
new file mode 100644
index 0000000000..599157691a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/53.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 0.0.0.0
+t=0 0
+m=audio 9322 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/54.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/54.sdp
new file mode 100644
index 0000000000..66dadbb93f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/54.sdp
@@ -0,0 +1,7 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/55.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/55.sdp
new file mode 100644
index 0000000000..bd724c471b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/55.sdp
@@ -0,0 +1,8 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 97
+a=rtpmap:97 iLBC/8000
+
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/6.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/6.sdp
new file mode 100644
index 0000000000..3f80345e30
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/6.sdp
@@ -0,0 +1,9 @@
+v=0
+o=alice 2890844526 2890844527 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 51372 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 0 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/7.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/7.sdp
new file mode 100644
index 0000000000..920ad8229e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/7.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844565 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 0
+a=rtpmap:0 PCMU/8000
+m=video 0 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/8.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/8.sdp
new file mode 100644
index 0000000000..96c4975ec0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/8.sdp
@@ -0,0 +1,12 @@
+v=0
+o=alice 2890844526 2890844526 IN IP4 host.atlanta.example.com
+s=
+c=IN IP4 host.atlanta.example.com
+t=0 0
+m=audio 49170 RTP/AVP 0 8 97
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:97 iLBC/8000
+m=video 51372 RTP/AVP 31 32
+a=rtpmap:31 H261/90000
+a=rtpmap:32 MPV/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/9.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/9.sdp
new file mode 100644
index 0000000000..a9a5e12138
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/9.sdp
@@ -0,0 +1,9 @@
+v=0
+o=bob 2808844564 2808844564 IN IP4 host.biloxi.example.com
+s=
+c=IN IP4 host.biloxi.example.com
+t=0 0
+m=audio 49172 RTP/AVP 99
+a=rtpmap:99 iLBC/8000
+m=video 51374 RTP/AVP 31
+a=rtpmap:31 H261/90000
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-1.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-1.sdp
new file mode 100644
index 0000000000..6d664bc051
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-1.sdp
@@ -0,0 +1,58 @@
+v=0
+o=mozilla...THIS_IS_SDPARTA-46.0.1 5115930144083302970 0 IN IP4 0.0.0.0
+s=-
+t=0 0
+a=fingerprint:sha-256 24:67:5E:1B:9A:B9:CF:36:C5:30:8F:35:F7:B1:50:66:88:81:92:CB:29:BA:53:A5:02:C8:0A:A5:4E:9C:AE:D9
+a=group:BUNDLE sdparta_0 sdparta_1 sdparta_2
+a=ice-options:trickle
+a=msid-semantic:WMS *
+m=audio 9 UDP/TLS/RTP/SAVPF 109 9 0 8
+c=IN IP4 0.0.0.0
+a=sendrecv
+a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level
+a=fmtp:109 maxplaybackrate=48000;stereo=1
+a=ice-pwd:b9f3911b591ae61e2d7f6af0531fd2a3
+a=ice-ufrag:3edc9012
+a=mid:sdparta_0
+a=msid:{258e92fb-547c-40ca-92e9-efe0cedb4cba} {bd1fafff-bfd0-40d4-b0a3-2a87cff307ee}
+a=rtcp-mux
+a=rtpmap:109 opus/48000/2
+a=rtpmap:9 G722/8000/1
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=setup:actpass
+a=ssrc:2121669360 cname:{387b0735-bde2-43a4-8484-7f5663b60f24}
+m=video 9 UDP/TLS/RTP/SAVPF 120 126 97
+c=IN IP4 0.0.0.0
+a=sendrecv
+a=fmtp:126 profile-level-id=42e01f;level-asymmetry-allowed=1;packetization-mode=1
+a=fmtp:97 profile-level-id=42e01f;level-asymmetry-allowed=1
+a=fmtp:120 max-fs=12288;max-fr=60
+a=ice-pwd:b9f3911b591ae61e2d7f6af0531fd2a3
+a=ice-ufrag:3edc9012
+a=mid:sdparta_1
+a=msid:{258e92fb-547c-40ca-92e9-efe0cedb4cba} {9e8f5867-a9aa-4489-8bd4-3a8a57a5e592}
+a=rtcp-fb:120 nack
+a=rtcp-fb:120 nack pli
+a=rtcp-fb:120 ccm fir
+a=rtcp-fb:126 nack
+a=rtcp-fb:126 nack pli
+a=rtcp-fb:126 ccm fir
+a=rtcp-fb:97 nack
+a=rtcp-fb:97 nack pli
+a=rtcp-fb:97 ccm fir
+a=rtcp-mux
+a=rtpmap:120 VP8/90000
+a=rtpmap:126 H264/90000
+a=rtpmap:97 H264/90000
+a=setup:actpass
+a=ssrc:2158832026 cname:{387b0735-bde2-43a4-8484-7f5663b60f24}
+m=application 9 DTLS/SCTP 5000
+c=IN IP4 0.0.0.0
+a=sendrecv
+a=ice-pwd:b9f3911b591ae61e2d7f6af0531fd2a3
+a=ice-ufrag:3edc9012
+a=mid:sdparta_2
+a=sctpmap:5000 webrtc-datachannel 256
+a=setup:actpass
+a=ssrc:2670959794 cname:{387b0735-bde2-43a4-8484-7f5663b60f24}
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-2.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-2.sdp
new file mode 100644
index 0000000000..07806eb225
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/firefox-2.sdp
@@ -0,0 +1,43 @@
+v=0
+o=mozilla...THIS_IS_SDPARTA-46.0.1 3068771576687940834 0 IN IP4 0.0.0.0
+s=-
+t=0 0
+a=fingerprint:sha-256 AD:87:B3:11:E4:E2:BA:EF:D2:3F:2E:AC:24:57:8E:DC:1F:67:41:29:44:C4:96:E3:62:90:CC:90:59:CA:2C:84
+a=group:BUNDLE sdparta_0 sdparta_1 sdparta_2
+a=ice-options:trickle
+a=msid-semantic:WMS *
+m=audio 9 UDP/TLS/RTP/SAVPF 109
+c=IN IP4 0.0.0.0
+a=recvonly
+a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level
+a=fmtp:109 maxplaybackrate=48000;stereo=1
+a=ice-pwd:ff4c4dc6fe92e1f22d2c10352d8967d5
+a=ice-ufrag:a539544b
+a=mid:sdparta_0
+a=rtcp-mux
+a=rtpmap:109 opus/48000/2
+a=setup:active
+a=ssrc:600995474 cname:{5b598a29-a81b-4ffe-a2c5-507778057e7a}
+m=video 9 UDP/TLS/RTP/SAVPF 120
+c=IN IP4 0.0.0.0
+a=recvonly
+a=fmtp:120 max-fs=12288;max-fr=60
+a=ice-pwd:ff4c4dc6fe92e1f22d2c10352d8967d5
+a=ice-ufrag:a539544b
+a=mid:sdparta_1
+a=rtcp-fb:120 nack
+a=rtcp-fb:120 nack pli
+a=rtcp-fb:120 ccm fir
+a=rtcp-mux
+a=rtpmap:120 VP8/90000
+a=setup:active
+a=ssrc:3480150809 cname:{5b598a29-a81b-4ffe-a2c5-507778057e7a}
+m=application 9 DTLS/SCTP 5000
+c=IN IP4 0.0.0.0
+a=sendrecv
+a=ice-pwd:ff4c4dc6fe92e1f22d2c10352d8967d5
+a=ice-ufrag:a539544b
+a=mid:sdparta_2
+a=sctpmap:5000 webrtc-datachannel 256
+a=setup:active
+a=ssrc:3021788991 cname:{5b598a29-a81b-4ffe-a2c5-507778057e7a}
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-1.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-1.sdp
new file mode 100644
index 0000000000..42d15e81f4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-1.sdp
@@ -0,0 +1,85 @@
+v=0
+o=- 1656229333038673902 2 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=group:BUNDLE audio video data
+a=msid-semantic: WMS Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4
+m=audio 9 UDP/TLS/RTP/SAVPF 111 103 104 9 0 8 106 105 13 126
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:1Jyk4q3nLIL5NiMx
+a=ice-pwd:GL8/iarMqPIhImfnsG2dyXlH
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:actpass
+a=mid:audio
+a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level
+a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=sendrecv
+a=rtcp-mux
+a=rtpmap:111 opus/48000/2
+a=rtcp-fb:111 transport-cc
+a=fmtp:111 minptime=10; useinbandfec=1
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=rtpmap:9 G722/8000
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:106 CN/32000
+a=rtpmap:105 CN/16000
+a=rtpmap:13 CN/8000
+a=rtpmap:126 telephone-event/8000
+a=maxptime:60
+a=ssrc:2233075910 cname:VhHMGYCjn4alR9zP
+a=ssrc:2233075910 msid:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4 689d3496-0896-4d52-bce6-8e90512a368b
+a=ssrc:2233075910 mslabel:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4
+a=ssrc:2233075910 label:689d3496-0896-4d52-bce6-8e90512a368b
+m=video 9 UDP/TLS/RTP/SAVPF 100 101 116 117 96 97 98
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:1Jyk4q3nLIL5NiMx
+a=ice-pwd:GL8/iarMqPIhImfnsG2dyXlH
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:actpass
+a=mid:video
+a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
+a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=extmap:4 urn:3gpp:video-orientation
+a=sendrecv
+a=rtcp-mux
+a=rtpmap:100 VP8/90000
+a=rtcp-fb:100 ccm fir
+a=rtcp-fb:100 nack
+a=rtcp-fb:100 nack pli
+a=rtcp-fb:100 goog-remb
+a=rtcp-fb:100 transport-cc
+a=rtpmap:101 VP9/90000
+a=rtcp-fb:101 ccm fir
+a=rtcp-fb:101 nack
+a=rtcp-fb:101 nack pli
+a=rtcp-fb:101 goog-remb
+a=rtcp-fb:101 transport-cc
+a=rtpmap:116 red/90000
+a=rtpmap:117 ulpfec/90000
+a=rtpmap:96 rtx/90000
+a=fmtp:96 apt=100
+a=rtpmap:97 rtx/90000
+a=fmtp:97 apt=101
+a=rtpmap:98 rtx/90000
+a=fmtp:98 apt=116
+a=ssrc-group:FID 50498894 2399294607
+a=ssrc:50498894 cname:VhHMGYCjn4alR9zP
+a=ssrc:50498894 msid:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4 1aef96f4-fc4c-4f86-98f3-0fbf4f625f70
+a=ssrc:50498894 mslabel:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4
+a=ssrc:50498894 label:1aef96f4-fc4c-4f86-98f3-0fbf4f625f70
+a=ssrc:2399294607 cname:VhHMGYCjn4alR9zP
+a=ssrc:2399294607 msid:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4 1aef96f4-fc4c-4f86-98f3-0fbf4f625f70
+a=ssrc:2399294607 mslabel:Ppsa09YmDLBombOh5e8HqfqxEIPF69a46Hd4
+a=ssrc:2399294607 label:1aef96f4-fc4c-4f86-98f3-0fbf4f625f70
+m=application 9 DTLS/SCTP 5000
+c=IN IP4 0.0.0.0
+a=ice-ufrag:1Jyk4q3nLIL5NiMx
+a=ice-pwd:GL8/iarMqPIhImfnsG2dyXlH
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:actpass
+a=mid:data
+a=sctpmap:5000 webrtc-datachannel 1024
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-2.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-2.sdp
new file mode 100644
index 0000000000..6ed4e3b4a8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/opera-2.sdp
@@ -0,0 +1,73 @@
+v=0
+o=- 2013283641453412290 2 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=group:BUNDLE audio video data
+a=msid-semantic: WMS
+m=audio 9 UDP/TLS/RTP/SAVPF 111 103 104 9 0 8 106 105 13 126
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:YVa3KTlFDCwsfPOQ
+a=ice-pwd:ByUn1Od88VokVM0rtQ/bbeZa
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:active
+a=mid:audio
+a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level
+a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=recvonly
+a=rtcp-mux
+a=rtpmap:111 opus/48000/2
+a=rtcp-fb:111 transport-cc
+a=fmtp:111 minptime=10; useinbandfec=1
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=rtpmap:9 G722/8000
+a=rtpmap:0 PCMU/8000
+a=rtpmap:8 PCMA/8000
+a=rtpmap:106 CN/32000
+a=rtpmap:105 CN/16000
+a=rtpmap:13 CN/8000
+a=rtpmap:126 telephone-event/8000
+a=maxptime:60
+m=video 9 UDP/TLS/RTP/SAVPF 100 101 116 117 96 97 98
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:YVa3KTlFDCwsfPOQ
+a=ice-pwd:ByUn1Od88VokVM0rtQ/bbeZa
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:active
+a=mid:video
+a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
+a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=extmap:4 urn:3gpp:video-orientation
+a=recvonly
+a=rtcp-mux
+a=rtpmap:100 VP8/90000
+a=rtcp-fb:100 ccm fir
+a=rtcp-fb:100 nack
+a=rtcp-fb:100 nack pli
+a=rtcp-fb:100 goog-remb
+a=rtcp-fb:100 transport-cc
+a=rtpmap:101 VP9/90000
+a=rtcp-fb:101 ccm fir
+a=rtcp-fb:101 nack
+a=rtcp-fb:101 nack pli
+a=rtcp-fb:101 goog-remb
+a=rtcp-fb:101 transport-cc
+a=rtpmap:116 red/90000
+a=rtpmap:117 ulpfec/90000
+a=rtpmap:96 rtx/90000
+a=fmtp:96 apt=100
+a=rtpmap:97 rtx/90000
+a=fmtp:97 apt=101
+a=rtpmap:98 rtx/90000
+a=fmtp:98 apt=116
+m=application 9 DTLS/SCTP 5000
+c=IN IP4 0.0.0.0
+b=AS:30
+a=ice-ufrag:YVa3KTlFDCwsfPOQ
+a=ice-pwd:ByUn1Od88VokVM0rtQ/bbeZa
+a=fingerprint:sha-256 5A:16:96:94:B2:AC:60:27:64:C5:FE:46:6C:02:C0:CD:49:E3:E2:0B:5B:C9:D4:86:C4:B3:A4:F2:23:80:7A:DA
+a=setup:active
+a=mid:data
+a=sctpmap:5000 webrtc-datachannel 1024
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.1.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.1.sdp
new file mode 100644
index 0000000000..e7a219812b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.1.sdp
@@ -0,0 +1,41 @@
+v=0
+o=- 367669362084170381 2 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=group:BUNDLE 0
+a=msid-semantic: WMS VfhSdt9LWGwoduWpoASvxGyAGEQFAkQe1hT1
+m=video 9 UDP/TLS/RTP/SAVPF 96
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:OLwt
+a=ice-pwd:kjGBqIFYs8UqCyfmJ7nEJw/Q
+a=ice-options:trickle
+a=fingerprint:sha-256 B4:AB:3E:01:90:D1:FC:22:8F:05:6A:01:76:B6:C5:0D:45:88:1A:E1:99:AF:18:43:E6:18:BB:D0:92:51:9B:0E
+a=setup:actpass
+a=mid:0
+a=extmap:14 urn:ietf:params:rtp-hdrext:toffset
+a=extmap:13 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=extmap:12 urn:3gpp:video-orientation
+a=extmap:2 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01
+a=extmap:11 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay
+a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type
+a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing
+a=extmap:8 http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07
+a=extmap:9 http://www.webrtc.org/experiments/rtp-hdrext/color-space
+a=extmap:3 urn:ietf:params:rtp-hdrext:sdes:mid
+a=extmap:4 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id
+a=extmap:5 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id
+a=sendrecv
+a=msid:VfhSdt9LWGwoduWpoASvxGyAGEQFAkQe1hT1 a8c06601-e9ed-4312-a7d4-283e078c5966
+a=rtcp-mux
+a=rtcp-rsize
+a=rtpmap:96 VP8/90000
+a=rtcp-fb:96 goog-remb
+a=rtcp-fb:96 transport-cc
+a=rtcp-fb:96 ccm fir
+a=rtcp-fb:96 nack
+a=rtcp-fb:96 nack pli
+a=rid:f send
+a=rid:h send
+a=rid:q send
+a=simulcast:send f;h;q
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.2.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.2.sdp
new file mode 100644
index 0000000000..52879f9e66
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/simulcast.2.sdp
@@ -0,0 +1,41 @@
+v=0
+o=- 367669362084170381 2 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=group:BUNDLE 0
+a=msid-semantic: WMS VfhSdt9LWGwoduWpoASvxGyAGEQFAkQe1hT1
+m=video 9 UDP/TLS/RTP/SAVPF 96
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:OLwt
+a=ice-pwd:kjGBqIFYs8UqCyfmJ7nEJw/Q
+a=ice-options:trickle
+a=fingerprint:sha-256 B4:AB:3E:01:90:D1:FC:22:8F:05:6A:01:76:B6:C5:0D:45:88:1A:E1:99:AF:18:43:E6:18:BB:D0:92:51:9B:0E
+a=setup:actpass
+a=mid:0
+a=extmap:14 urn:ietf:params:rtp-hdrext:toffset
+a=extmap:13 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time
+a=extmap:12 urn:3gpp:video-orientation
+a=extmap:2 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01
+a=extmap:11 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay
+a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type
+a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing
+a=extmap:8 http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07
+a=extmap:9 http://www.webrtc.org/experiments/rtp-hdrext/color-space
+a=extmap:3 urn:ietf:params:rtp-hdrext:sdes:mid
+a=extmap:4 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id
+a=extmap:5 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id
+a=sendrecv
+a=msid:VfhSdt9LWGwoduWpoASvxGyAGEQFAkQe1hT1 a8c06601-e9ed-4312-a7d4-283e078c5966
+a=rtcp-mux
+a=rtcp-rsize
+a=rtpmap:96 VP8/90000
+a=rtcp-fb:96 goog-remb
+a=rtcp-fb:96 transport-cc
+a=rtcp-fb:96 ccm fir
+a=rtcp-fb:96 nack
+a=rtcp-fb:96 nack pli
+a=rid:f recv
+a=rid:h recv
+a=rid:q recv
+a=simulcast:recv f;h;q
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-1.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-1.sdp
new file mode 100644
index 0000000000..ce349ac30f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-1.sdp
@@ -0,0 +1,52 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS local_stream_1
+m=audio 2345 RTP/SAVPF 111 103 104
+c=IN IP4 74.125.127.126
+a=rtcp:2347 IN IP4 74.125.127.126
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host generation 2
+a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx raddr 192.168.1.5 rport 2346 generation 2
+a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx raddr 192.168.1.5 rport 2348 generation 2
+a=ice-ufrag:ufrag_voice
+a=ice-pwd:pwd_voice
+a=mid:audio_content_name
+a=sendrecv
+a=rtcp-mux
+a=rtcp-rsize
+a=crypto:1 AES_CM_128_HMAC_SHA1_32 inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 dummy_session_params
+a=rtpmap:111 opus/48000/2
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=ssrc:1 cname:stream_1_cname
+a=ssrc:1 msid:local_stream_1 audio_track_id_1
+a=ssrc:1 mslabel:local_stream_1
+a=ssrc:1 label:audio_track_id_1
+m=video 3457 RTP/SAVPF 120
+c=IN IP4 74.125.224.39
+a=rtcp:3456 IN IP4 74.125.224.39
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host generation 2
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host generation 2
+a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay generation 2
+a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay generation 2
+a=ice-ufrag:ufrag_video
+a=ice-pwd:pwd_video
+a=mid:video_content_name
+a=sendrecv
+a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32
+a=rtpmap:120 VP8/90000
+a=ssrc-group:FEC 2 3
+a=ssrc:2 cname:stream_1_cname
+a=ssrc:2 msid:local_stream_1 video_track_id_1
+a=ssrc:2 mslabel:local_stream_1
+a=ssrc:2 label:video_track_id_1
+a=ssrc:3 cname:stream_1_cname
+a=ssrc:3 msid:local_stream_1 video_track_id_1
+a=ssrc:3 mslabel:local_stream_1
+a=ssrc:3 label:video_track_id_1
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-2.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-2.sdp
new file mode 100644
index 0000000000..a35392513f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-2.sdp
@@ -0,0 +1,40 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS local_stream_1
+m=audio 9 RTP/SAVPF 111 103 104
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:ufrag_voice
+a=ice-pwd:pwd_voice
+a=mid:audio_content_name
+a=sendrecv
+a=rtcp-mux
+a=rtcp-rsize
+a=crypto:1 AES_CM_128_HMAC_SHA1_32 inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 dummy_session_params
+a=rtpmap:111 opus/48000/2
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=ssrc:1 cname:stream_1_cname
+a=ssrc:1 msid:local_stream_1 audio_track_id_1
+a=ssrc:1 mslabel:local_stream_1
+a=ssrc:1 label:audio_track_id_1
+m=video 9 RTP/SAVPF 120
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:ufrag_video
+a=ice-pwd:pwd_video
+a=mid:video_content_name
+a=sendrecv
+a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32
+a=rtpmap:120 VP8/90000
+a=ssrc-group:FEC 2 3
+a=ssrc:2 cname:stream_1_cname
+a=ssrc:2 msid:local_stream_1 video_track_id_1
+a=ssrc:2 mslabel:local_stream_1
+a=ssrc:2 label:video_track_id_1
+a=ssrc:3 cname:stream_1_cname
+a=ssrc:3 msid:local_stream_1 video_track_id_1
+a=ssrc:3 mslabel:local_stream_1
+a=ssrc:3 label:video_track_id_1
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-3.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-3.sdp
new file mode 100644
index 0000000000..50f8e55ef7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-3.sdp
@@ -0,0 +1,13 @@
+m=application 9 RTP/SAVPF 101
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:ufrag_data
+a=ice-pwd:pwd_data
+a=mid:data_content_name
+a=sendrecv
+a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5
+a=rtpmap:101 google-data/90000
+a=ssrc:10 cname:data_channel_cname
+a=ssrc:10 msid:data_channel data_channeld0
+a=ssrc:10 mslabel:data_channel
+a=ssrc:10 label:data_channeld0
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-4.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-4.sdp
new file mode 100644
index 0000000000..709db42c00
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-4.sdp
@@ -0,0 +1,11 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS
+m=audio 9 RTP/SAVPF 111 103 104
+c=IN IP4 0.0.0.0
+a=x-google-flag:conference
+m=video 9 RTP/SAVPF 120
+c=IN IP4 0.0.0.0
+a=x-google-flag:conference
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-5.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-5.sdp
new file mode 100644
index 0000000000..c440386062
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-5.sdp
@@ -0,0 +1,5 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS local_stream
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-6.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-6.sdp
new file mode 100644
index 0000000000..e07b5647bf
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-6.sdp
@@ -0,0 +1,12 @@
+m=audio 9 RTP/SAVPF 111
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:ufrag_voice
+a=ice-pwd:pwd_voice
+a=mid:audio_content_name
+a=sendrecv
+a=rtpmap:111 opus/48000/2
+a=ssrc:1 cname:stream_1_cname
+a=ssrc:1 msid:local_stream audio_track_id_1
+a=ssrc:1 mslabel:local_stream
+a=ssrc:1 label:audio_track_id_1
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-7.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-7.sdp
new file mode 100644
index 0000000000..8bdacc2baa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-7.sdp
@@ -0,0 +1,12 @@
+m=video 9 RTP/SAVPF 120
+c=IN IP4 0.0.0.0
+a=rtcp:9 IN IP4 0.0.0.0
+a=ice-ufrag:ufrag_video
+a=ice-pwd:pwd_video
+a=mid:video_content_name
+a=sendrecv
+a=rtpmap:120 VP8/90000
+a=ssrc:2 cname:stream_1_cname
+a=ssrc:2 msid:local_stream video_track_id_1
+a=ssrc:2 mslabel:local_stream
+a=ssrc:2 label:video_track_id_1
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-8.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-8.sdp
new file mode 100644
index 0000000000..9688fc46a2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-8.sdp
@@ -0,0 +1,64 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS local_stream_1 local_stream_2
+m=audio 2345 RTP/SAVPF 111 103 104
+c=IN IP4 74.125.127.126
+a=rtcp:2347 IN IP4 74.125.127.126
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host generation 2
+a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx raddr 192.168.1.5 rport 2346 generation 2
+a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx raddr 192.168.1.5 rport 2348 generation 2
+a=ice-ufrag:ufrag_voice
+a=ice-pwd:pwd_voice
+a=mid:audio_content_name
+a=sendrecv
+a=rtcp-mux
+a=rtcp-rsize
+a=crypto:1 AES_CM_128_HMAC_SHA1_32 inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 dummy_session_params
+a=rtpmap:111 opus/48000/2
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=ssrc:1 cname:stream_1_cname
+a=ssrc:1 msid:local_stream_1 audio_track_id_1
+a=ssrc:1 mslabel:local_stream_1
+a=ssrc:1 label:audio_track_id_1
+a=ssrc:4 cname:stream_2_cname
+a=ssrc:4 msid:local_stream_2 audio_track_id_2
+a=ssrc:4 mslabel:local_stream_2
+a=ssrc:4 label:audio_track_id_2
+m=video 3457 RTP/SAVPF 120
+c=IN IP4 74.125.224.39
+a=rtcp:3456 IN IP4 74.125.224.39
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host generation 2
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host generation 2
+a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay generation 2
+a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay generation 2
+a=ice-ufrag:ufrag_video
+a=ice-pwd:pwd_video
+a=mid:video_content_name
+a=sendrecv
+a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32
+a=rtpmap:120 VP8/90000
+a=ssrc-group:FEC 2 3
+a=ssrc:2 cname:stream_1_cname
+a=ssrc:2 msid:local_stream_1 video_track_id_1
+a=ssrc:2 mslabel:local_stream_1
+a=ssrc:2 label:video_track_id_1
+a=ssrc:3 cname:stream_1_cname
+a=ssrc:3 msid:local_stream_1 video_track_id_1
+a=ssrc:3 mslabel:local_stream_1
+a=ssrc:3 label:video_track_id_1
+a=ssrc:5 cname:stream_2_cname
+a=ssrc:5 msid:local_stream_2 video_track_id_2
+a=ssrc:5 mslabel:local_stream_2
+a=ssrc:5 label:video_track_id_2
+a=ssrc:6 cname:stream_2_cname
+a=ssrc:6 msid:local_stream_2 video_track_id_3
+a=ssrc:6 mslabel:local_stream_2
+a=ssrc:6 label:video_track_id_3
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-9.sdp b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-9.sdp
new file mode 100644
index 0000000000..8c7c4c7c3d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp-corpus/unittest-9.sdp
@@ -0,0 +1,66 @@
+v=0
+o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1
+s=-
+t=0 0
+a=msid-semantic: WMS local_stream_1 local_stream_2
+m=audio 2345 RTP/SAVPF 111 103 104
+c=IN IP4 74.125.127.126
+a=rtcp:2347 IN IP4 74.125.127.126
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host generation 2
+a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx raddr 192.168.1.5 rport 2346 generation 2
+a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx raddr 192.168.1.5 rport 2348 generation 2
+a=ice-ufrag:ufrag_voice
+a=ice-pwd:pwd_voice
+a=mid:audio_content_name
+a=msid:local_stream_1 audio_track_id_1
+a=sendrecv
+a=rtcp-mux
+a=rtcp-rsize
+a=crypto:1 AES_CM_128_HMAC_SHA1_32 inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 dummy_session_params
+a=rtpmap:111 opus/48000/2
+a=rtpmap:103 ISAC/16000
+a=rtpmap:104 ISAC/32000
+a=ssrc:1 cname:stream_1_cname
+a=ssrc:1 msid:local_stream_1 audio_track_id_1
+a=ssrc:1 mslabel:local_stream_1
+a=ssrc:1 label:audio_track_id_1
+a=ssrc:4 cname:stream_2_cname
+a=ssrc:4 msid:local_stream_2 audio_track_id_2
+a=ssrc:4 mslabel:local_stream_2
+a=ssrc:4 label:audio_track_id_2
+m=video 3457 RTP/SAVPF 120
+c=IN IP4 74.125.224.39
+a=rtcp:3456 IN IP4 74.125.224.39
+a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host generation 2
+a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host generation 2
+a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host generation 2
+a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host generation 2
+a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay generation 2
+a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay generation 2
+a=ice-ufrag:ufrag_video
+a=ice-pwd:pwd_video
+a=mid:video_content_name
+a=msid:local_stream_1 video_track_id_1
+a=sendrecv
+a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32
+a=rtpmap:120 VP8/90000
+a=ssrc-group:FEC 2 3
+a=ssrc:2 cname:stream_1_cname
+a=ssrc:2 msid:local_stream_1 video_track_id_1
+a=ssrc:2 mslabel:local_stream_1
+a=ssrc:2 label:video_track_id_1
+a=ssrc:3 cname:stream_1_cname
+a=ssrc:3 msid:local_stream_1 video_track_id_1
+a=ssrc:3 mslabel:local_stream_1
+a=ssrc:3 label:video_track_id_1
+a=ssrc:5 cname:stream_2_cname
+a=ssrc:5 msid:local_stream_2 video_track_id_2
+a=ssrc:5 mslabel:local_stream_2
+a=ssrc:5 label:video_track_id_2
+a=ssrc:6 cname:stream_2_cname
+a=ssrc:6 msid:local_stream_2 video_track_id_3
+a=ssrc:6 mslabel:local_stream_2
+a=ssrc:6 label:video_track_id_3
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/sdp.tokens b/third_party/libwebrtc/test/fuzzers/corpora/sdp.tokens
new file mode 100644
index 0000000000..ddfdad5811
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/sdp.tokens
@@ -0,0 +1,56 @@
+"0.0.0.0"
+"127.0.0.1"
+"application"
+"audio"
+"candidate"
+"cname"
+"conference"
+"crypto"
+"default"
+"extmap"
+"fingerprint"
+"fmtp"
+"generation"
+"group"
+"host"
+"ice-lite"
+"ice-options"
+"ice-pwd"
+"ice-ufrag"
+"inactive"
+"IP4"
+"IP6"
+"ISAC"
+"label"
+"mid"
+"msid"
+"msid-semantic"
+"mslabel"
+"network-cost"
+"network-id"
+"prflx"
+"pwd"
+"raddr"
+"recvonly"
+"relay"
+"rport"
+"rtcp"
+"rtcp-fb"
+"rtcp-mux"
+"rtcp-rsize"
+"rtpmap"
+"sctpmap"
+"sctp-port"
+"sendonly"
+"sendrecv"
+"setup"
+"srflx"
+"ssrc"
+"ssrc-group"
+"tcptype"
+"typ"
+"ufrag"
+"video"
+"webrtc-datachannel"
+"WMS"
+"x-google-flag" \ No newline at end of file
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/0 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/0
new file mode 100644
index 0000000000..fbfcbf9105
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/0
@@ -0,0 +1 @@
+90742757
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/1 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/1
new file mode 100644
index 0000000000..587611a1d6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/1
@@ -0,0 +1 @@
+27.70727677
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/2 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/2
new file mode 100644
index 0000000000..45a4fb75db
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/2
@@ -0,0 +1 @@
+8
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/3 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/3
new file mode 100644
index 0000000000..a0a162047c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/3
@@ -0,0 +1 @@
+0.0001
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/4 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/4
new file mode 100644
index 0000000000..da24ab1137
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/4
@@ -0,0 +1 @@
+27277272727272727272
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/5 b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/5
new file mode 100644
index 0000000000..575c0a830c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/string_to_number-corpus/5
@@ -0,0 +1 @@
+999999999999999999999999999999999999999999999999999999999999999999999999999999
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/0.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/0.stun
new file mode 100644
index 0000000000..205997706f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/0.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/1.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/1.stun
new file mode 100644
index 0000000000..1f82c03ffa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/1.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/10.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/10.stun
new file mode 100644
index 0000000000..a6b06b2be8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/10.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/11.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/11.stun
new file mode 100644
index 0000000000..1f82c03ffa
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/11.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/12.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/12.stun
new file mode 100644
index 0000000000..cb91baa250
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/12.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/13.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/13.stun
new file mode 100644
index 0000000000..63298fce7d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/13.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/14.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/14.stun
new file mode 100644
index 0000000000..31f9f732d4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/14.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/15.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/15.stun
new file mode 100644
index 0000000000..3d15a67193
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/15.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/16.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/16.stun
new file mode 100644
index 0000000000..8ccf0af26d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/16.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/17.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/17.stun
new file mode 100644
index 0000000000..9c3efa0b80
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/17.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/2.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/2.stun
new file mode 100644
index 0000000000..50fe614adb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/2.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/3.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/3.stun
new file mode 100644
index 0000000000..50fe614adb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/3.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/4.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/4.stun
new file mode 100644
index 0000000000..a6b06b2be8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/4.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/5.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/5.stun
new file mode 100644
index 0000000000..c0a79fa7ab
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/5.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/6.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/6.stun
new file mode 100644
index 0000000000..1f43a4787d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/6.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/7.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/7.stun
new file mode 100644
index 0000000000..f5e824a68d
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/7.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/8.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/8.stun
new file mode 100644
index 0000000000..99f4e3bcf8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/8.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/9.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/9.stun
new file mode 100644
index 0000000000..cbbcab4e74
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/9.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/validator-crash-1.stun b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/validator-crash-1.stun
new file mode 100644
index 0000000000..5428c38d75
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun-corpus/validator-crash-1.stun
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/stun.tokens b/third_party/libwebrtc/test/fuzzers/corpora/stun.tokens
new file mode 100644
index 0000000000..e01130f12e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/stun.tokens
@@ -0,0 +1,2 @@
+"2112A442"
+"12214A24"
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/video_layers_allocation-corpus/vla-0 b/third_party/libwebrtc/test/fuzzers/corpora/video_layers_allocation-corpus/vla-0
new file mode 100644
index 0000000000..1b6fdf78f6
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/video_layers_allocation-corpus/vla-0
@@ -0,0 +1 @@
+@2?ÿÿ \ No newline at end of file
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/0cee4d5fd2905dc1fb2979f10a9724265b7075e2 b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/0cee4d5fd2905dc1fb2979f10a9724265b7075e2
new file mode 100644
index 0000000000..febe4ad130
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/0cee4d5fd2905dc1fb2979f10a9724265b7075e2
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a1c75436e1872a23391d58316d88c45da0fb7682 b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a1c75436e1872a23391d58316d88c45da0fb7682
new file mode 100644
index 0000000000..fc538ac83f
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a1c75436e1872a23391d58316d88c45da0fb7682
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a8b3fb7be82395c9462684c766841d668dc0029f b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a8b3fb7be82395c9462684c766841d668dc0029f
new file mode 100644
index 0000000000..1bd09373c8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/corpora/vp9-encoder-references-corpus/a8b3fb7be82395c9462684c766841d668dc0029f
Binary files differ
diff --git a/third_party/libwebrtc/test/fuzzers/dcsctp_packet_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/dcsctp_packet_fuzzer.cc
new file mode 100644
index 0000000000..2fc3fe10f1
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/dcsctp_packet_fuzzer.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/packet/chunk/chunk.h"
+#include "net/dcsctp/packet/sctp_packet.h"
+
+namespace webrtc {
+using dcsctp::SctpPacket;
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ absl::optional<SctpPacket> c =
+ SctpPacket::Parse(rtc::ArrayView<const uint8_t>(data, size),
+ /*disable_checksum_verification=*/true);
+
+ if (!c.has_value()) {
+ return;
+ }
+
+ for (const SctpPacket::ChunkDescriptor& desc : c->descriptors()) {
+ dcsctp::DebugConvertChunkToString(desc.data);
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/dcsctp_socket_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/dcsctp_socket_fuzzer.cc
new file mode 100644
index 0000000000..390cbb7f6c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/dcsctp_socket_fuzzer.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "net/dcsctp/fuzzers/dcsctp_fuzzers.h"
+#include "net/dcsctp/public/dcsctp_message.h"
+#include "net/dcsctp/public/dcsctp_options.h"
+#include "net/dcsctp/public/dcsctp_socket.h"
+#include "net/dcsctp/socket/dcsctp_socket.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ dcsctp::dcsctp_fuzzers::FuzzerCallbacks cb;
+ dcsctp::DcSctpOptions options;
+ options.disable_checksum_verification = true;
+ dcsctp::DcSctpSocket socket("A", cb, nullptr, options);
+
+ dcsctp::dcsctp_fuzzers::FuzzSocket(socket, cb,
+ rtc::ArrayView<const uint8_t>(data, size));
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/field_trial_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/field_trial_fuzzer.cc
new file mode 100644
index 0000000000..74fe65ce56
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/field_trial_fuzzer.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ std::string field_trial(reinterpret_cast<const char*>(data), size);
+ field_trial::InitFieldTrialsFromString(field_trial.c_str());
+ field_trial::FindFullName(field_trial);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/flexfec_header_reader_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/flexfec_header_reader_fuzzer.cc
new file mode 100644
index 0000000000..854cc8b811
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/flexfec_header_reader_fuzzer.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/flexfec_header_reader_writer.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+namespace webrtc {
+
+using Packet = ForwardErrorCorrection::Packet;
+using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket;
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ ReceivedFecPacket packet;
+ packet.pkt = rtc::scoped_refptr<Packet>(new Packet());
+ const size_t packet_size =
+ std::min(size, static_cast<size_t>(IP_PACKET_SIZE));
+ packet.pkt->data.SetSize(packet_size);
+ packet.pkt->data.EnsureCapacity(IP_PACKET_SIZE);
+ memcpy(packet.pkt->data.MutableData(), data, packet_size);
+
+ FlexfecHeaderReader flexfec_reader;
+ flexfec_reader.ReadFecHeader(&packet);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/flexfec_receiver_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/flexfec_receiver_fuzzer.cc
new file mode 100644
index 0000000000..67d603d3fc
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/flexfec_receiver_fuzzer.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "modules/rtp_rtcp/include/flexfec_receiver.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+
+namespace webrtc {
+
+namespace {
+class DummyCallback : public RecoveredPacketReceiver {
+ void OnRecoveredPacket(const RtpPacketReceived& packet) override {}
+};
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ constexpr size_t kMinDataNeeded = 12;
+ if (size < kMinDataNeeded || size > 2000) {
+ return;
+ }
+
+ uint32_t flexfec_ssrc;
+ memcpy(&flexfec_ssrc, data + 0, 4);
+ uint16_t flexfec_seq_num;
+ memcpy(&flexfec_seq_num, data + 4, 2);
+ uint32_t media_ssrc;
+ memcpy(&media_ssrc, data + 6, 4);
+ uint16_t media_seq_num;
+ memcpy(&media_seq_num, data + 10, 2);
+
+ DummyCallback callback;
+ FlexfecReceiver receiver(flexfec_ssrc, media_ssrc, &callback);
+
+ std::unique_ptr<uint8_t[]> packet;
+ size_t packet_length;
+ size_t i = kMinDataNeeded;
+ while (i < size) {
+ packet_length = kRtpHeaderSize + data[i++];
+ packet = std::unique_ptr<uint8_t[]>(new uint8_t[packet_length]);
+ if (i + packet_length >= size) {
+ break;
+ }
+ memcpy(packet.get(), data + i, packet_length);
+ i += packet_length;
+ if (i < size && data[i++] % 2 == 0) {
+ // Simulate FlexFEC packet.
+ ByteWriter<uint16_t>::WriteBigEndian(packet.get() + 2, flexfec_seq_num++);
+ ByteWriter<uint32_t>::WriteBigEndian(packet.get() + 8, flexfec_ssrc);
+ } else {
+ // Simulate media packet.
+ ByteWriter<uint16_t>::WriteBigEndian(packet.get() + 2, media_seq_num++);
+ ByteWriter<uint32_t>::WriteBigEndian(packet.get() + 8, media_ssrc);
+ }
+ RtpPacketReceived parsed_packet;
+ if (parsed_packet.Parse(packet.get(), packet_length)) {
+ receiver.OnRtpPacket(parsed_packet);
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/flexfec_sender_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/flexfec_sender_fuzzer.cc
new file mode 100644
index 0000000000..8ddd1c0fe0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/flexfec_sender_fuzzer.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "modules/rtp_rtcp/include/flexfec_sender.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+
+constexpr int kFlexfecPayloadType = 123;
+constexpr uint32_t kMediaSsrc = 1234;
+constexpr uint32_t kFlexfecSsrc = 5678;
+const char kNoMid[] = "";
+const std::vector<RtpExtension> kNoRtpHeaderExtensions;
+const std::vector<RtpExtensionSize> kNoRtpHeaderExtensionSizes;
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ size_t i = 0;
+ if (size < 5 || size > 200) {
+ return;
+ }
+ SimulatedClock clock(1 + data[i++]);
+ FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid,
+ kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes,
+ nullptr /* rtp_state */, &clock);
+ FecProtectionParams params = {
+ data[i++], static_cast<int>(data[i++] % 100),
+ data[i++] <= 127 ? kFecMaskRandom : kFecMaskBursty};
+ sender.SetProtectionParameters(params, params);
+ uint16_t seq_num = data[i++];
+
+ while (i + 1 < size) {
+ // Everything past the base RTP header (12 bytes) is payload,
+ // from the perspective of FlexFEC.
+ size_t payload_size = data[i++];
+ if (i + kRtpHeaderSize + payload_size >= size)
+ break;
+ std::unique_ptr<uint8_t[]> packet(
+ new uint8_t[kRtpHeaderSize + payload_size]);
+ memcpy(packet.get(), &data[i], kRtpHeaderSize + payload_size);
+ i += kRtpHeaderSize + payload_size;
+ ByteWriter<uint16_t>::WriteBigEndian(&packet[2], seq_num++);
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[8], kMediaSsrc);
+ RtpPacketToSend rtp_packet(nullptr);
+ if (!rtp_packet.Parse(packet.get(), kRtpHeaderSize + payload_size))
+ break;
+ sender.AddPacketAndGenerateFec(rtp_packet);
+ sender.GetFecPackets();
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/forward_error_correction_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/forward_error_correction_fuzzer.cc
new file mode 100644
index 0000000000..04a459bc71
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/forward_error_correction_fuzzer.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "rtc_base/byte_buffer.h"
+
+namespace webrtc {
+
+namespace {
+constexpr uint32_t kMediaSsrc = 100200300;
+constexpr uint32_t kFecSsrc = 111222333;
+
+constexpr size_t kPacketSize = 50;
+constexpr size_t kMaxPacketsInBuffer = 48;
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 5000) {
+ return;
+ }
+ // Object under test.
+ std::unique_ptr<ForwardErrorCorrection> fec =
+ ForwardErrorCorrection::CreateFlexfec(kFecSsrc, kMediaSsrc);
+
+ // Entropy from fuzzer.
+ rtc::ByteBufferReader fuzz_buffer(reinterpret_cast<const char*>(data), size);
+
+ // Initial stream state.
+ uint16_t media_seqnum;
+ if (!fuzz_buffer.ReadUInt16(&media_seqnum))
+ return;
+ const uint16_t original_media_seqnum = media_seqnum;
+ uint16_t fec_seqnum;
+ if (!fuzz_buffer.ReadUInt16(&fec_seqnum))
+ return;
+
+ // Existing packets in the packet buffer.
+ ForwardErrorCorrection::RecoveredPacketList recovered_packets;
+ uint8_t num_existing_recovered_packets;
+ if (!fuzz_buffer.ReadUInt8(&num_existing_recovered_packets))
+ return;
+ for (size_t i = 0; i < num_existing_recovered_packets % kMaxPacketsInBuffer;
+ ++i) {
+ ForwardErrorCorrection::RecoveredPacket* recovered_packet =
+ new ForwardErrorCorrection::RecoveredPacket();
+ recovered_packet->pkt = rtc::scoped_refptr<ForwardErrorCorrection::Packet>(
+ new ForwardErrorCorrection::Packet());
+ recovered_packet->pkt->data.SetSize(kPacketSize);
+ memset(recovered_packet->pkt->data.MutableData(), 0, kPacketSize);
+ recovered_packet->ssrc = kMediaSsrc;
+ recovered_packet->seq_num = media_seqnum++;
+ recovered_packets.emplace_back(recovered_packet);
+ }
+
+ // New packets received from the network.
+ ForwardErrorCorrection::ReceivedPacket received_packet;
+ received_packet.pkt = rtc::scoped_refptr<ForwardErrorCorrection::Packet>(
+ new ForwardErrorCorrection::Packet());
+ received_packet.pkt->data.SetSize(kPacketSize);
+ received_packet.pkt->data.EnsureCapacity(IP_PACKET_SIZE);
+ uint8_t* packet_buffer = received_packet.pkt->data.MutableData();
+ uint8_t reordering;
+ uint16_t seq_num_diff;
+ uint8_t packet_type;
+ uint8_t packet_loss;
+ while (true) {
+ if (!fuzz_buffer.ReadBytes(reinterpret_cast<char*>(packet_buffer),
+ kPacketSize)) {
+ return;
+ }
+ if (!fuzz_buffer.ReadUInt8(&reordering))
+ return;
+ if (!fuzz_buffer.ReadUInt16(&seq_num_diff))
+ return;
+ if (!fuzz_buffer.ReadUInt8(&packet_type))
+ return;
+ if (!fuzz_buffer.ReadUInt8(&packet_loss))
+ return;
+
+ if (reordering % 10 != 0)
+ seq_num_diff = 0;
+
+ if (packet_type % 2 == 0) {
+ received_packet.is_fec = true;
+ received_packet.ssrc = kFecSsrc;
+ received_packet.seq_num = seq_num_diff + fec_seqnum++;
+
+ // Overwrite parts of the FlexFEC header for fuzzing efficiency.
+ packet_buffer[0] = 0; // R, F bits.
+ ByteWriter<uint8_t>::WriteBigEndian(&packet_buffer[8], 1); // SSRCCount.
+ ByteWriter<uint32_t>::WriteBigEndian(&packet_buffer[12],
+ kMediaSsrc); // SSRC_i.
+ ByteWriter<uint16_t>::WriteBigEndian(
+ &packet_buffer[16], original_media_seqnum); // SN base_i.
+ } else {
+ received_packet.is_fec = false;
+ received_packet.ssrc = kMediaSsrc;
+ received_packet.seq_num = seq_num_diff + media_seqnum++;
+ }
+
+ if (packet_loss % 10 == 0)
+ continue;
+
+ fec->DecodeFec(received_packet, &recovered_packets);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/frame_buffer2_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/frame_buffer2_fuzzer.cc
new file mode 100644
index 0000000000..ec1bbbb4c1
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/frame_buffer2_fuzzer.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/task_queue/task_queue_base.h"
+#include "modules/video_coding/frame_buffer2.h"
+#include "modules/video_coding/timing/timing.h"
+#include "test/scoped_key_value_config.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+namespace {
+
+// When DataReader runs out of data provided in the constructor it will
+// just set/return 0 instead.
+struct DataReader {
+ DataReader(const uint8_t* data, size_t size) : data_(data), size_(size) {}
+
+ void CopyTo(void* destination, size_t dest_size) {
+ memset(destination, 0, dest_size);
+
+ size_t bytes_to_copy = std::min(size_ - offset_, dest_size);
+ memcpy(destination, data_ + offset_, bytes_to_copy);
+ offset_ += bytes_to_copy;
+ }
+
+ template <typename T>
+ T GetNum() {
+ T res;
+ if (offset_ + sizeof(res) < size_) {
+ memcpy(&res, data_ + offset_, sizeof(res));
+ offset_ += sizeof(res);
+ return res;
+ }
+
+ offset_ = size_;
+ return T(0);
+ }
+
+ bool MoreToRead() { return offset_ < size_; }
+
+ const uint8_t* const data_;
+ size_t size_;
+ size_t offset_ = 0;
+};
+
+class FuzzyFrameObject : public EncodedFrame {
+ public:
+ FuzzyFrameObject() {}
+ ~FuzzyFrameObject() {}
+
+ int64_t ReceivedTime() const override { return 0; }
+ int64_t RenderTime() const override { return _renderTimeMs; }
+};
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ if (size > 10000) {
+ return;
+ }
+ DataReader reader(data, size);
+ GlobalSimulatedTimeController time_controller(Timestamp::Seconds(0));
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue =
+ time_controller.GetTaskQueueFactory()->CreateTaskQueue(
+ "time_tq", TaskQueueFactory::Priority::NORMAL);
+ test::ScopedKeyValueConfig field_trials;
+ VCMTiming timing(time_controller.GetClock(), field_trials);
+ video_coding::FrameBuffer frame_buffer(time_controller.GetClock(), &timing,
+ field_trials);
+
+ bool next_frame_task_running = false;
+
+ while (reader.MoreToRead()) {
+ if (reader.GetNum<uint8_t>() % 2) {
+ std::unique_ptr<FuzzyFrameObject> frame(new FuzzyFrameObject());
+ frame->SetId(reader.GetNum<int64_t>());
+ frame->SetSpatialIndex(reader.GetNum<uint8_t>() % 5);
+ frame->SetTimestamp(reader.GetNum<uint32_t>());
+ frame->num_references =
+ reader.GetNum<uint8_t>() % EncodedFrame::kMaxFrameReferences;
+
+ for (size_t r = 0; r < frame->num_references; ++r)
+ frame->references[r] = reader.GetNum<int64_t>();
+
+ frame_buffer.InsertFrame(std::move(frame));
+ } else {
+ if (!next_frame_task_running) {
+ next_frame_task_running = true;
+ bool keyframe_required = reader.GetNum<uint8_t>() % 2;
+ int max_wait_time_ms = reader.GetNum<uint8_t>();
+ task_queue->PostTask([&task_queue, &frame_buffer,
+ &next_frame_task_running, keyframe_required,
+ max_wait_time_ms] {
+ frame_buffer.NextFrame(
+ max_wait_time_ms, keyframe_required, task_queue.get(),
+ [&next_frame_task_running](std::unique_ptr<EncodedFrame> frame) {
+ next_frame_task_running = false;
+ });
+ });
+ }
+ }
+
+ time_controller.AdvanceTime(TimeDelta::Millis(reader.GetNum<uint8_t>()));
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/frame_buffer_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/frame_buffer_fuzzer.cc
new file mode 100644
index 0000000000..e58d5e9f98
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/frame_buffer_fuzzer.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/array_view.h"
+#include "api/video/encoded_frame.h"
+#include "api/video/frame_buffer.h"
+#include "rtc_base/numerics/sequence_number_unwrapper.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+namespace {
+// Minimal concrete EncodedFrame; the timing getters are stubbed out since
+// this fuzzer only exercises the frame buffer's bookkeeping.
+class FuzzyFrameObject : public EncodedFrame {
+ public:
+  int64_t ReceivedTime() const override { return 0; }
+  int64_t RenderTime() const override { return 0; }
+};
+
+// Frame ids on the wire are 15 bits wide (wrap at 2^15).
+constexpr int kFrameIdLength = 1 << 15;
+
+} // namespace
+
+// Fuzzer entry point: exercises the api/video FrameBuffer by interleaving
+// randomized queries, extractions, drops, and frame insertions.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  // Cap the input size to keep each fuzz iteration fast.
+  if (size > 10000) {
+    return;
+  }
+
+  test::ScopedKeyValueConfig field_trials;
+  FrameBuffer buffer(/*max_frame_slots=*/100, /*max_decode_history=*/1000,
+                     field_trials);
+  test::FuzzDataHelper helper(rtc::MakeArrayView(data, size));
+  SeqNumUnwrapper<uint16_t, kFrameIdLength> unwrapper;
+
+  while (helper.BytesLeft() > 0) {
+    // One fuzz byte selects which of the six buffer operations to perform.
+    int action = helper.ReadOrDefaultValue<uint8_t>(0) % 6;
+
+    switch (action) {
+      case 0: {
+        buffer.LastContinuousFrameId();
+        break;
+      }
+      case 1: {
+        buffer.LastContinuousTemporalUnitFrameId();
+        break;
+      }
+      case 2: {
+        buffer.DecodableTemporalUnitsInfo();
+        break;
+      }
+      case 3: {
+        buffer.ExtractNextDecodableTemporalUnit();
+        break;
+      }
+      case 4: {
+        buffer.DropNextDecodableTemporalUnit();
+        break;
+      }
+      case 5: {
+        // Build a frame with fuzzed timestamp, id, layer flag and references,
+        // then insert it into the buffer.
+        auto frame = std::make_unique<FuzzyFrameObject>();
+        frame->SetTimestamp(helper.ReadOrDefaultValue<uint32_t>(0));
+        // Mask wire ids to 15 bits, then unwrap into a monotonic 64-bit id.
+        int64_t wire_id =
+            helper.ReadOrDefaultValue<uint16_t>(0) & (kFrameIdLength - 1);
+        frame->SetId(unwrapper.Unwrap(wire_id));
+        frame->is_last_spatial_layer = helper.ReadOrDefaultValue<bool>(false);
+
+        frame->num_references = helper.ReadOrDefaultValue<uint8_t>(0) %
+                                EncodedFrame::kMaxFrameReferences;
+
+        for (uint8_t i = 0; i < frame->num_references; ++i) {
+          frame->references[i] = helper.ReadOrDefaultValue<int64_t>(0);
+        }
+
+        buffer.InsertFrame(std::move(frame));
+        break;
+      }
+    }
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.cc b/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.cc
new file mode 100644
index 0000000000..866f7bc4b8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace test {
+
+// Wraps the fuzzer-provided byte span; reading starts at offset zero.
+FuzzDataHelper::FuzzDataHelper(rtc::ArrayView<const uint8_t> data)
+    : data_(data) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.h b/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.h
new file mode 100644
index 0000000000..cedc31d0a4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/fuzz_data_helper.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUZZERS_FUZZ_DATA_HELPER_H_
+#define TEST_FUZZERS_FUZZ_DATA_HELPER_H_
+
+#include <limits>
+
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace test {
+
+// Helper class to take care of the fuzzer input, read from it, and keep track
+// of when the end of the data has been reached.
+class FuzzDataHelper {
+ public:
+  explicit FuzzDataHelper(rtc::ArrayView<const uint8_t> data);
+
+  // Returns true if n bytes can be read.
+  bool CanReadBytes(size_t n) const { return data_ix_ + n <= data_.size(); }
+
+  // Reads and returns data of type T. Crashes (RTC_CHECK) if fewer than
+  // sizeof(T) bytes remain; guard with CanReadBytes() first.
+  template <typename T>
+  T Read() {
+    RTC_CHECK(CanReadBytes(sizeof(T)));
+    T x = ByteReader<T>::ReadLittleEndian(&data_[data_ix_]);
+    data_ix_ += sizeof(T);
+    return x;
+  }
+
+  // Reads and returns data of type T. Returns default_value if not enough
+  // fuzzer input remains to read a T.
+  template <typename T>
+  T ReadOrDefaultValue(T default_value) {
+    if (!CanReadBytes(sizeof(T))) {
+      return default_value;
+    }
+    return Read<T>();
+  }
+
+  // Like ReadOrDefaultValue, but replaces the value 0 with default_value.
+  template <typename T>
+  T ReadOrDefaultValueNotZero(T default_value) {
+    static_assert(std::is_integral<T>::value, "");
+    T x = ReadOrDefaultValue(default_value);
+    return x == 0 ? default_value : x;
+  }
+
+  // Returns one of the elements from the provided input array. The selection
+  // is based on the fuzzer input data. If not enough fuzzer data is available,
+  // the method will return the first element in the input array. The reason for
+  // not flagging this as an error is to allow the method to be called from
+  // class constructors, and in constructors we typically do not handle
+  // errors. The code will work anyway, and the fuzzer will likely see that
+  // providing more data will actually make this method return something else.
+  template <typename T, size_t N>
+  T SelectOneOf(const T (&select_from)[N]) {
+    static_assert(N <= std::numeric_limits<uint8_t>::max(), "");
+    // Read an index between 0 and select_from.size() - 1 from the fuzzer data.
+    uint8_t index = ReadOrDefaultValue<uint8_t>(0) % N;
+    return select_from[index];
+  }
+
+  // Returns a view into the next `bytes` bytes of fuzzer data, or an empty
+  // view if fewer than `bytes` bytes remain.
+  rtc::ArrayView<const uint8_t> ReadByteArray(size_t bytes) {
+    if (!CanReadBytes(bytes)) {
+      return rtc::ArrayView<const uint8_t>(nullptr, 0);
+    }
+    const size_t index_to_return = data_ix_;
+    data_ix_ += bytes;
+    return data_.subview(index_to_return, bytes);
+  }
+
+  // If sizeof(T) > BytesLeft then the remaining bytes will be used and the rest
+  // of the object will be zero initialized.
+  template <typename T>
+  void CopyTo(T* object) {
+    memset(object, 0, sizeof(T));
+
+    size_t bytes_to_copy = std::min(BytesLeft(), sizeof(T));
+    memcpy(object, data_.data() + data_ix_, bytes_to_copy);
+    data_ix_ += bytes_to_copy;
+  }
+
+  size_t BytesRead() const { return data_ix_; }
+
+  size_t BytesLeft() const { return data_.size() - data_ix_; }
+
+ private:
+  rtc::ArrayView<const uint8_t> data_;
+  size_t data_ix_ = 0;  // Current read offset into data_.
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FUZZERS_FUZZ_DATA_HELPER_H_
diff --git a/third_party/libwebrtc/test/fuzzers/h264_bitstream_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/h264_bitstream_parser_fuzzer.cc
new file mode 100644
index 0000000000..cd1128c0b4
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/h264_bitstream_parser_fuzzer.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stdint.h>
+
+#include "common_video/h264/h264_bitstream_parser.h"
+
+namespace webrtc {
+// Fuzzer entry point: parses the raw input as an H.264 bitstream and then
+// queries the QP of the last parsed slice.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  H264BitstreamParser h264_bitstream_parser;
+  h264_bitstream_parser.ParseBitstream(
+      rtc::ArrayView<const uint8_t>(data, size));
+  h264_bitstream_parser.GetLastSliceQp();
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/h264_depacketizer_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/h264_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..97127228ed
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/h264_depacketizer_fuzzer.cc
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h"
+
+namespace webrtc {
+// Fuzzer entry point: parses the raw input as an RTP H.264 payload.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  // Cap the input size to keep each fuzz iteration fast.
+  if (size > 200000)
+    return;
+  VideoRtpDepacketizerH264 depacketizer;
+  depacketizer.Parse(rtc::CopyOnWriteBuffer(data, size));
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/neteq_rtp_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/neteq_rtp_fuzzer.cc
new file mode 100644
index 0000000000..348c84f040
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/neteq_rtp_fuzzer.cc
@@ -0,0 +1,184 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <cmath>
+#include <cstring>
+#include <memory>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h"
+#include "modules/audio_coding/neteq/tools/audio_checksum.h"
+#include "modules/audio_coding/neteq/tools/encode_neteq_input.h"
+#include "modules/audio_coding/neteq/tools/neteq_test.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr int kPayloadType = 95;
+
+// Signal generator producing a fixed-frequency (300 Hz) sine wave, used as
+// the audio source that gets encoded into NetEq input packets.
+class SineGenerator : public EncodeNetEqInput::Generator {
+ public:
+  explicit SineGenerator(int sample_rate_hz)
+      : sample_rate_hz_(sample_rate_hz) {}
+
+  // Returns num_samples sine samples; the phase carries over between calls
+  // so successive buffers form a continuous waveform.
+  rtc::ArrayView<const int16_t> Generate(size_t num_samples) override {
+    if (samples_.size() < num_samples) {
+      samples_.resize(num_samples);
+    }
+
+    rtc::ArrayView<int16_t> output(samples_.data(), num_samples);
+    for (auto& x : output) {
+      x = static_cast<int16_t>(2000.0 * std::sin(phase_));
+      phase_ += 2 * kPi * kFreqHz / sample_rate_hz_;
+    }
+    return output;
+  }
+
+ private:
+  static constexpr int kFreqHz = 300;  // The sinewave frequency.
+  const int sample_rate_hz_;
+  const double kPi = std::acos(-1);
+  std::vector<int16_t> samples_;  // Reused output buffer.
+  double phase_ = 0.0;
+};
+
+// NetEqInput implementation that encodes a sine wave into PCM16B packets and
+// then fuzzes each packet's RTP header fields and, optionally, a prefix of
+// its payload, using the raw fuzzer bytes.
+class FuzzRtpInput : public NetEqInput {
+ public:
+  explicit FuzzRtpInput(rtc::ArrayView<const uint8_t> data) : data_(data) {
+    AudioEncoderPcm16B::Config config;
+    config.payload_type = kPayloadType;
+    config.sample_rate_hz = 32000;
+    std::unique_ptr<AudioEncoder> encoder(new AudioEncoderPcm16B(config));
+    std::unique_ptr<EncodeNetEqInput::Generator> generator(
+        new SineGenerator(config.sample_rate_hz));
+    input_.reset(new EncodeNetEqInput(std::move(generator), std::move(encoder),
+                                      std::numeric_limits<int64_t>::max()));
+    packet_ = input_->PopPacket();
+    FuzzHeader();
+    MaybeFuzzPayload();
+  }
+
+  absl::optional<int64_t> NextPacketTime() const override {
+    return packet_->time_ms;
+  }
+
+  absl::optional<int64_t> NextOutputEventTime() const override {
+    return input_->NextOutputEventTime();
+  }
+
+  // Hands out the previously prepared packet and pre-fuzzes the next one.
+  std::unique_ptr<PacketData> PopPacket() override {
+    RTC_DCHECK(packet_);
+    std::unique_ptr<PacketData> packet_to_return = std::move(packet_);
+    packet_ = input_->PopPacket();
+    FuzzHeader();
+    MaybeFuzzPayload();
+    return packet_to_return;
+  }
+
+  void AdvanceOutputEvent() override { return input_->AdvanceOutputEvent(); }
+
+  // True once the fuzzer data has been exhausted; ends the NetEq test.
+  bool ended() const override { return ended_; }
+
+  absl::optional<RTPHeader> NextHeader() const override {
+    RTC_DCHECK(packet_);
+    return packet_->header;
+  }
+
+ private:
+  // Overwrites payload type, sequence number, timestamp and SSRC of the
+  // current packet with fuzzer data. Sets ended_ when the data runs out.
+  void FuzzHeader() {
+    constexpr size_t kNumBytesToFuzz = 11;
+    if (data_ix_ + kNumBytesToFuzz > data_.size()) {
+      ended_ = true;
+      return;
+    }
+    RTC_DCHECK(packet_);
+    const size_t start_ix = data_ix_;
+    packet_->header.payloadType =
+        ByteReader<uint8_t>::ReadLittleEndian(&data_[data_ix_]);
+    // RTP payload types are 7 bits wide.
+    packet_->header.payloadType &= 0x7F;
+    data_ix_ += sizeof(uint8_t);
+    packet_->header.sequenceNumber =
+        ByteReader<uint16_t>::ReadLittleEndian(&data_[data_ix_]);
+    data_ix_ += sizeof(uint16_t);
+    packet_->header.timestamp =
+        ByteReader<uint32_t>::ReadLittleEndian(&data_[data_ix_]);
+    data_ix_ += sizeof(uint32_t);
+    packet_->header.ssrc =
+        ByteReader<uint32_t>::ReadLittleEndian(&data_[data_ix_]);
+    data_ix_ += sizeof(uint32_t);
+    RTC_CHECK_EQ(data_ix_ - start_ix, kNumBytesToFuzz);
+  }
+
+  void MaybeFuzzPayload() {
+    // Read one byte of fuzz data to determine how many payload bytes to fuzz.
+    if (data_ix_ + 1 > data_.size()) {
+      ended_ = true;
+      return;
+    }
+    size_t bytes_to_fuzz = data_[data_ix_++];
+
+    // Restrict number of bytes to fuzz to 16; a reasonably low number enough to
+    // cover a few RED headers. Also don't write outside the payload length.
+    bytes_to_fuzz = std::min(bytes_to_fuzz % 16, packet_->payload.size());
+
+    if (bytes_to_fuzz == 0)
+      return;
+
+    if (data_ix_ + bytes_to_fuzz > data_.size()) {
+      ended_ = true;
+      return;
+    }
+
+    std::memcpy(packet_->payload.data(), &data_[data_ix_], bytes_to_fuzz);
+    data_ix_ += bytes_to_fuzz;
+  }
+
+  bool ended_ = false;
+  rtc::ArrayView<const uint8_t> data_;  // Raw fuzzer input.
+  size_t data_ix_ = 0;                  // Read offset into data_.
+  std::unique_ptr<EncodeNetEqInput> input_;
+  std::unique_ptr<PacketData> packet_;  // Next packet, already fuzzed.
+};
+} // namespace
+
+// Builds a NetEq test around the fuzzed RTP input and a checksum output sink,
+// then runs NetEq until the fuzz input is exhausted.
+void FuzzOneInputTest(const uint8_t* data, size_t size) {
+  std::unique_ptr<FuzzRtpInput> input(
+      new FuzzRtpInput(rtc::ArrayView<const uint8_t>(data, size)));
+  std::unique_ptr<AudioChecksum> output(new AudioChecksum);
+  NetEqTest::Callbacks callbacks;
+  NetEq::Config config;
+  auto codecs = NetEqTest::StandardDecoderMap();
+  // kPayloadType is the payload type that will be used for encoding. Verify
+  // that it is included in the standard decoder map, and that it points to the
+  // expected decoder type.
+  const auto it = codecs.find(kPayloadType);
+  RTC_CHECK(it != codecs.end());
+  RTC_CHECK(it->second == SdpAudioFormat("L16", 32000, 1));
+
+  NetEqTest test(config, CreateBuiltinAudioDecoderFactory(), codecs,
+                 /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
+                 std::move(input), std::move(output), callbacks);
+  test.Run();
+}
+
+} // namespace test
+
+// Fuzzer entry point.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  // Cap the input size to keep each fuzz iteration fast.
+  if (size > 70000) {
+    return;
+  }
+  test::FuzzOneInputTest(data, size);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/neteq_signal_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/neteq_signal_fuzzer.cc
new file mode 100644
index 0000000000..8653f137a2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/neteq_signal_fuzzer.cc
@@ -0,0 +1,201 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cmath>
+#include <limits>
+#include <memory>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h"
+#include "modules/audio_coding/neteq/tools/audio_checksum.h"
+#include "modules/audio_coding/neteq/tools/encode_neteq_input.h"
+#include "modules/audio_coding/neteq/tools/neteq_test.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/random.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Generate a mixture of sine wave and gaussian noise.
+class SineAndNoiseGenerator : public EncodeNetEqInput::Generator {
+ public:
+  // The noise generator is seeded with a value from the fuzzer data, but 0 is
+  // avoided (since it is not allowed by the Random class).
+  SineAndNoiseGenerator(int sample_rate_hz, FuzzDataHelper* fuzz_data)
+      : sample_rate_hz_(sample_rate_hz),
+        fuzz_data_(*fuzz_data),
+        noise_generator_(fuzz_data_.ReadOrDefaultValueNotZero<uint64_t>(1)) {}
+
+  // Generates num_samples of the sine-gaussian mixture. Amplitude and noise
+  // level are re-read from the fuzzer data on every call.
+  rtc::ArrayView<const int16_t> Generate(size_t num_samples) override {
+    if (samples_.size() < num_samples) {
+      samples_.resize(num_samples);
+    }
+
+    rtc::ArrayView<int16_t> output(samples_.data(), num_samples);
+    // Randomize an amplitude between 0 and 32768; use 65000/2 if we are out of
+    // fuzzer data.
+    const float amplitude = fuzz_data_.ReadOrDefaultValue<uint16_t>(65000) / 2;
+    // Randomize a noise standard deviation between 0 and 1999.
+    const float noise_std = fuzz_data_.ReadOrDefaultValue<uint16_t>(0) % 2000;
+    for (auto& x : output) {
+      x = rtc::saturated_cast<int16_t>(amplitude * std::sin(phase_) +
+                                       noise_generator_.Gaussian(0, noise_std));
+      phase_ += 2 * kPi * kFreqHz / sample_rate_hz_;
+    }
+    return output;
+  }
+
+ private:
+  static constexpr int kFreqHz = 300;  // The sinewave frequency.
+  const int sample_rate_hz_;
+  const double kPi = std::acos(-1);
+  std::vector<int16_t> samples_;  // Reused output buffer.
+  double phase_ = 0.0;
+  FuzzDataHelper& fuzz_data_;
+  Random noise_generator_;
+};
+
+// NetEqInput implementation that encodes the fuzz-driven sine+noise signal
+// into packets, then uses further fuzzer bytes to drop packets, jitter their
+// arrival times, and pick the output polling period (simulating clock drift).
+class FuzzSignalInput : public NetEqInput {
+ public:
+  explicit FuzzSignalInput(FuzzDataHelper* fuzz_data,
+                           int sample_rate,
+                           uint8_t payload_type)
+      : fuzz_data_(*fuzz_data) {
+    AudioEncoderPcm16B::Config config;
+    config.payload_type = payload_type;
+    config.sample_rate_hz = sample_rate;
+    std::unique_ptr<AudioEncoder> encoder(new AudioEncoderPcm16B(config));
+    std::unique_ptr<EncodeNetEqInput::Generator> generator(
+        new SineAndNoiseGenerator(config.sample_rate_hz, fuzz_data));
+    input_.reset(new EncodeNetEqInput(std::move(generator), std::move(encoder),
+                                      std::numeric_limits<int64_t>::max()));
+    packet_ = input_->PopPacket();
+
+    // Select an output event period. This is how long time we wait between each
+    // call to NetEq::GetAudio. 10 ms is nominal, 9 and 11 ms will both lead to
+    // clock drift (in different directions).
+    constexpr int output_event_periods[] = {9, 10, 11};
+    output_event_period_ms_ = fuzz_data_.SelectOneOf(output_event_periods);
+  }
+
+  absl::optional<int64_t> NextPacketTime() const override {
+    return packet_->time_ms;
+  }
+
+  absl::optional<int64_t> NextOutputEventTime() const override {
+    return next_output_event_ms_;
+  }
+
+  // Hands out the current packet and prepares the next one, possibly
+  // discarding packets (simulated loss) and adding arrival-time jitter.
+  std::unique_ptr<PacketData> PopPacket() override {
+    RTC_DCHECK(packet_);
+    std::unique_ptr<PacketData> packet_to_return = std::move(packet_);
+    do {
+      packet_ = input_->PopPacket();
+      // If the next value from the fuzzer input is 0, the packet is discarded
+      // and the next one is pulled from the source.
+    } while (fuzz_data_.CanReadBytes(1) && fuzz_data_.Read<uint8_t>() == 0);
+    if (fuzz_data_.CanReadBytes(1)) {
+      // Generate jitter by setting an offset for the arrival time.
+      const int8_t arrival_time_offset_ms = fuzz_data_.Read<int8_t>();
+      // The arrival time can not be before the previous packets.
+      packet_->time_ms = std::max(packet_to_return->time_ms,
+                                  packet_->time_ms + arrival_time_offset_ms);
+    } else {
+      // Mark that we are at the end of the test. However, the current packet is
+      // still valid (but it may not have been fuzzed as expected).
+      ended_ = true;
+    }
+    return packet_to_return;
+  }
+
+  void AdvanceOutputEvent() override {
+    next_output_event_ms_ += output_event_period_ms_;
+  }
+
+  // True once the fuzzer data has been exhausted; ends the NetEq test.
+  bool ended() const override { return ended_; }
+
+  absl::optional<RTPHeader> NextHeader() const override {
+    RTC_DCHECK(packet_);
+    return packet_->header;
+  }
+
+ private:
+  bool ended_ = false;
+  FuzzDataHelper& fuzz_data_;
+  std::unique_ptr<EncodeNetEqInput> input_;
+  std::unique_ptr<PacketData> packet_;  // Next packet to hand out.
+  int64_t next_output_event_ms_ = 0;
+  int64_t output_event_period_ms_ = 10;
+};
+
+// Returns true iff `m` contains `key` and it maps to exactly `value`.
+template <class T>
+bool MapHas(const std::map<int, T>& m, int key, const T& value) {
+  const auto it = m.find(key);
+  return (it != m.end() && it->second == value);
+}
+
+} // namespace
+
+// Sets up a NetEq test with a fuzz-selected sample rate/payload type, the
+// fuzzed input signal, and a checksum output sink, then runs it to the end.
+void FuzzOneInputTest(const uint8_t* data, size_t size) {
+  // Require at least one byte and cap the size to keep iterations fast.
+  if (size < 1 || size > 65000) {
+    return;
+  }
+
+  FuzzDataHelper fuzz_data(rtc::ArrayView<const uint8_t>(data, size));
+
+  // Allowed sample rates and payload types used in the test.
+  std::pair<int, uint8_t> rate_types[] = {
+      {8000, 93}, {16000, 94}, {32000, 95}, {48000, 96}};
+  const auto rate_type = fuzz_data.SelectOneOf(rate_types);
+  const int sample_rate = rate_type.first;
+  const uint8_t payload_type = rate_type.second;
+
+  // Set up the input signal generator.
+  std::unique_ptr<FuzzSignalInput> input(
+      new FuzzSignalInput(&fuzz_data, sample_rate, payload_type));
+
+  // Output sink for the test.
+  std::unique_ptr<AudioChecksum> output(new AudioChecksum);
+
+  // Configure NetEq and the NetEqTest object.
+  NetEqTest::Callbacks callbacks;
+  NetEq::Config config;
+  config.enable_post_decode_vad = true;
+  config.enable_fast_accelerate = true;
+  auto codecs = NetEqTest::StandardDecoderMap();
+  // rate_types contains the payload types that will be used for encoding.
+  // Verify that they all are included in the standard decoder map, and that
+  // they point to the expected decoder types.
+  RTC_CHECK(
+      MapHas(codecs, rate_types[0].second, SdpAudioFormat("l16", 8000, 1)));
+  RTC_CHECK(
+      MapHas(codecs, rate_types[1].second, SdpAudioFormat("l16", 16000, 1)));
+  RTC_CHECK(
+      MapHas(codecs, rate_types[2].second, SdpAudioFormat("l16", 32000, 1)));
+  RTC_CHECK(
+      MapHas(codecs, rate_types[3].second, SdpAudioFormat("l16", 48000, 1)));
+
+  NetEqTest test(config, CreateBuiltinAudioDecoderFactory(), codecs,
+                 /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
+                 std::move(input), std::move(output), callbacks);
+  test.Run();
+}
+
+} // namespace test
+
+// Fuzzer entry point.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  test::FuzzOneInputTest(data, size);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/pseudotcp_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/pseudotcp_parser_fuzzer.cc
new file mode 100644
index 0000000000..78ddf0e455
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/pseudotcp_parser_fuzzer.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "p2p/base/pseudo_tcp.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+// No-op IPseudoTcpNotify implementation; TcpWritePacket reports success for
+// every outgoing packet so that parsing can proceed without a real transport.
+class FakeIPseudoTcpNotify : public cricket::IPseudoTcpNotify {
+ public:
+  void OnTcpOpen(cricket::PseudoTcp* tcp) {}
+  void OnTcpReadable(cricket::PseudoTcp* tcp) {}
+  void OnTcpWriteable(cricket::PseudoTcp* tcp) {}
+  void OnTcpClosed(cricket::PseudoTcp* tcp, uint32_t error) {}
+
+  cricket::IPseudoTcpNotify::WriteResult TcpWritePacket(cricket::PseudoTcp* tcp,
+                                                        const char* buffer,
+                                                        size_t len) {
+    return cricket::IPseudoTcpNotify::WriteResult::WR_SUCCESS;
+  }
+};
+
+// Bundles the PseudoTcp instance under test with the thread object the
+// production code expects to exist.
+struct Environment {
+  explicit Environment(cricket::IPseudoTcpNotify* notifier)
+      : ptcp(notifier, 0) {}
+
+  // We need the thread to avoid some uninteresting crashes, since the
+  // production code expects there to be a thread object available.
+  rtc::AutoThread thread;
+  cricket::PseudoTcp ptcp;
+};
+
+// Intentionally never freed: one PseudoTcp instance is shared by all fuzzer
+// iterations, so its internal state persists across inputs.
+Environment* env = new Environment(new FakeIPseudoTcpNotify());
+
+// Fuzzer entry point: feeds the raw input to PseudoTcp's packet parser.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  env->ptcp.NotifyPacket(reinterpret_cast<const char*>(data), size);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/residual_echo_detector_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/residual_echo_detector_fuzzer.cc
new file mode 100644
index 0000000000..0efe81f220
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/residual_echo_detector_fuzzer.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <string.h>
+
+#include <algorithm>
+#include <bitset>
+#include <vector>
+
+#include "api/audio/echo_detector_creator.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Fuzzer entry point: feeds fuzz-derived single-sample audio "frames" to the
+// echo detector, interleaving render and capture updates in a fuzz-chosen
+// order.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  // Number of times to update the echo detector.
+  constexpr size_t kNrOfUpdates = 7;
+  // Each round of updates requires a call to both AnalyzeRender and
+  // AnalyzeCapture, so the amount of needed input bytes doubles. Also, two
+  // bytes are used to set the call order.
+  constexpr size_t kNrOfNeededInputBytes = 2 * kNrOfUpdates * sizeof(float) + 2;
+  // The maximum audio energy that an audio frame can have is equal to the
+  // number of samples in the frame multiplied by 2^30. We use a single sample
+  // to represent an audio frame in this test, so it should have a maximum value
+  // equal to the square root of that value.
+  const float maxFuzzedValue = sqrtf(20 * 48) * 32768;
+  if (size < kNrOfNeededInputBytes) {
+    return;
+  }
+  size_t read_idx = 0;
+  // Use the first two bytes to choose the call order.
+  uint16_t call_order_int;
+  memcpy(&call_order_int, &data[read_idx], 2);
+  read_idx += 2;
+  std::bitset<16> call_order(call_order_int);
+
+  rtc::scoped_refptr<EchoDetector> echo_detector = CreateEchoDetector();
+  std::vector<float> input(1);
+  // Call AnalyzeCaptureAudio once to prevent the flushing of the buffer.
+  echo_detector->AnalyzeCaptureAudio(input);
+  for (size_t i = 0; i < 2 * kNrOfUpdates; ++i) {
+    // Convert 4 input bytes to a float.
+    RTC_DCHECK_LE(read_idx + sizeof(float), size);
+    memcpy(input.data(), &data[read_idx], sizeof(float));
+    read_idx += sizeof(float);
+    if (!isfinite(input[0]) || fabs(input[0]) > maxFuzzedValue) {
+      // Ignore infinity, nan values and values that are unrealistically large.
+      continue;
+    }
+    // Bit i of the call-order word selects render vs. capture for this round.
+    if (call_order[i]) {
+      echo_detector->AnalyzeRenderAudio(input);
+    } else {
+      echo_detector->AnalyzeCaptureAudio(input);
+    }
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtcp_receiver_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtcp_receiver_fuzzer.cc
new file mode 100644
index 0000000000..8bad9e456a
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtcp_receiver_fuzzer.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace {
+
+constexpr int kRtcpIntervalMs = 1000;
+
+// RTCP is typically sent over UDP, which has a maximum payload length
+// of 65535 bytes. We err on the side of caution and check a bit above that.
+constexpr size_t kMaxInputLenBytes = 66000;
+
+// ModuleRtpRtcp implementation that ignores every callback from the receiver;
+// the fuzzer only cares that parsing does not crash.
+class NullModuleRtpRtcp : public RTCPReceiver::ModuleRtpRtcp {
+ public:
+  void SetTmmbn(std::vector<rtcp::TmmbItem>) override {}
+  void OnRequestSendReport() override {}
+  void OnReceivedNack(const std::vector<uint16_t>&) override {}
+  void OnReceivedRtcpReportBlocks(const ReportBlockList&) override {}
+};
+
+} // namespace
+
+// Fuzzer entry point: feeds the raw input to RTCPReceiver's packet parser.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  if (size > kMaxInputLenBytes) {
+    return;
+  }
+
+  NullModuleRtpRtcp rtp_rtcp_module;
+  SimulatedClock clock(1234);
+
+  RtpRtcpInterface::Configuration config;
+  config.clock = &clock;
+  config.rtcp_report_interval_ms = kRtcpIntervalMs;
+  config.local_media_ssrc = 1;
+
+  RTCPReceiver receiver(config, &rtp_rtcp_module);
+
+  receiver.IncomingPacket(data, size);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc
new file mode 100644
index 0000000000..168e7b606b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <vector>
+
+#include "api/array_view.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+// Fuzzer entry point: splits the raw input into length-prefixed payload
+// chunks and runs the AV1 depacketizer's frame assembly over them.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  std::vector<rtc::ArrayView<const uint8_t>> rtp_payloads;
+
+  // Convert plain array of bytes into array of array bytes.
+  test::FuzzDataHelper fuzz_input(rtc::MakeArrayView(data, size));
+  while (fuzz_input.CanReadBytes(sizeof(uint16_t))) {
+    // In practice one rtp payload can be up to ~1200 - 1500 bytes. Majority
+    // of the payload is just copied. To make fuzzing more efficient limit the
+    // size of rtp payload to realistic value.
+    uint16_t next_size = fuzz_input.Read<uint16_t>() % 1200;
+    if (next_size > fuzz_input.BytesLeft()) {
+      next_size = fuzz_input.BytesLeft();
+    }
+    rtp_payloads.push_back(fuzz_input.ReadByteArray(next_size));
+  }
+  // Run code under test.
+  VideoRtpDepacketizerAv1().AssembleFrame(rtp_payloads);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc
new file mode 100644
index 0000000000..82404f7264
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+#include <memory>
+#include <utility>
+
+#include "api/array_view.h"
+#include "common_video/generic_frame_descriptor/generic_frame_info.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "rtc_base/checks.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  FrameDependencyStructure structure1;
+  // Stays nullptr until the first descriptor with an attached structure.
+  const FrameDependencyStructure* structure1_ptr = nullptr;
+  std::unique_ptr<const FrameDependencyStructure> structure2;
+
+  test::FuzzDataHelper reader(rtc::MakeArrayView(data, size));
+  while (reader.CanReadBytes(1)) {
+    // The next byte is interpreted as the extension length, mirroring how
+    // sizes are encoded for two-byte rtp header extensions.
+    size_t field_size = reader.Read<uint8_t>();
+    auto field =
+        reader.ReadByteArray(std::min(field_size, reader.BytesLeft()));
+
+    // Parse the fuzzer-generated bytes.
+    DependencyDescriptor descriptor1;
+    if (!RtpDependencyDescriptorExtension::Parse(field, structure1_ptr,
+                                                 &descriptor1)) {
+      // Malformed buffer; try the next chunk.
+      continue;
+    }
+    if (descriptor1.attached_structure) {
+      structure1 = *descriptor1.attached_structure;
+      structure1_ptr = &structure1;
+    }
+    RTC_CHECK(structure1_ptr);
+
+    // Serialize the parsed descriptor back into a buffer.
+    size_t encoded_size =
+        RtpDependencyDescriptorExtension::ValueSize(structure1, descriptor1);
+    // The writer must not need more bytes than the reader consumed, i.e. it
+    // packs the descriptor into a minimal number of bytes.
+    RTC_CHECK_LE(encoded_size, field.size());
+    uint8_t storage[256];
+    // Holds because encoded_size <= field_size < 256.
+    RTC_CHECK_LT(encoded_size, 256);
+    rtc::ArrayView<uint8_t> rewrite_buffer(storage, encoded_size);
+    RTC_CHECK(RtpDependencyDescriptorExtension::Write(rewrite_buffer,
+                                                      structure1, descriptor1));
+
+    // Reparse the writer's output; unlike the random input this must
+    // always succeed.
+    DependencyDescriptor descriptor2;
+    RTC_CHECK(RtpDependencyDescriptorExtension::Parse(
+        rewrite_buffer, structure2.get(), &descriptor2));
+    // The round trip must preserve every field.
+    RTC_CHECK_EQ(descriptor1.first_packet_in_frame,
+                 descriptor2.first_packet_in_frame);
+    RTC_CHECK_EQ(descriptor1.last_packet_in_frame,
+                 descriptor2.last_packet_in_frame);
+    RTC_CHECK_EQ(descriptor1.attached_structure != nullptr,
+                 descriptor2.attached_structure != nullptr);
+    // Comparing with value_or hides the corner case where one side is
+    // nullopt while the other is 0, but for every other mismatch it yields
+    // a far more readable error than RTC_CHECK(optional1 == optional2).
+    // If the logger ever learns to pretty-print optional values, the
+    // value_or calls can be removed.
+    RTC_CHECK_EQ(descriptor1.active_decode_targets_bitmask.value_or(0),
+                 descriptor2.active_decode_targets_bitmask.value_or(0));
+    RTC_CHECK_EQ(descriptor1.frame_number, descriptor2.frame_number);
+    RTC_CHECK(descriptor1.resolution == descriptor2.resolution);
+    RTC_CHECK(descriptor1.frame_dependencies == descriptor2.frame_dependencies);
+
+    if (descriptor2.attached_structure) {
+      structure2 = std::move(descriptor2.attached_structure);
+    }
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
new file mode 100644
index 0000000000..fdb4aa5f3c
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "api/rtp_packet_infos.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+
+namespace webrtc {
+
+namespace {
+class DataReader {  // Pulls typed values out of the raw fuzzer input.
+ public:
+  DataReader(const uint8_t* data, size_t size) : data_(data), size_(size) {}
+
+  template <typename T>
+  void CopyTo(T* object) {  // Fills *object, zero-padding on underrun.
+    static_assert(std::is_pod<T>(), "");
+    uint8_t* destination = reinterpret_cast<uint8_t*>(object);
+    size_t object_size = sizeof(T);
+    size_t num_bytes = std::min(size_ - offset_, object_size);
+    memcpy(destination, data_ + offset_, num_bytes);
+    offset_ += num_bytes;
+
+    // If we did not have enough data, fill the rest with 0.
+    object_size -= num_bytes;
+    memset(destination + num_bytes, 0, object_size);
+  }
+
+  template <typename T>
+  T GetNum() {  // Returns the next T, or T(0) once the input is exhausted.
+    T res;
+    if (offset_ + sizeof(res) <= size_) {  // <=: also consume the last bytes.
+      memcpy(&res, data_ + offset_, sizeof(res));
+      offset_ += sizeof(res);
+      return res;
+    }
+
+    offset_ = size_;
+    return T(0);
+  }
+
+  bool MoreToRead() { return offset_ < size_; }
+
+ private:
+  const uint8_t* data_;
+  size_t size_;
+  size_t offset_ = 0;
+};
+
+absl::optional<RTPVideoHeader::GenericDescriptorInfo>
+GenerateGenericFrameDependencies(DataReader* reader) {
+  absl::optional<RTPVideoHeader::GenericDescriptorInfo> result;
+  uint8_t flags = reader->GetNum<uint8_t>();
+  if (flags & 0b1000'0000) {
+    // I.e. with 50% chance there are no generic dependencies; in that case
+    // the codec-specific code path of the RtpFrameReferenceFinder is
+    // exercised instead.
+    return result;
+  }
+
+  result.emplace();
+  result->frame_id = reader->GetNum<int32_t>();
+  result->spatial_index = (flags & 0b0111'0000) >> 4;
+  result->temporal_index = (flags & 0b0000'1110) >> 1;
+
+  // Keep diffs below 2^14; larger is unsupported by RtpFrameReferenceFinder.
+  int num_diffs = (reader->GetNum<uint8_t>() % 16);
+  for (int i = 0; i < num_diffs; ++i) {
+    result->dependencies.push_back(result->frame_id -
+                                   (reader->GetNum<uint16_t>() % (1 << 14)));
+  }
+
+  return result;
+}
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  DataReader reader(data, size);
+  RtpFrameReferenceFinder reference_finder;
+
+  auto codec = static_cast<VideoCodecType>(reader.GetNum<uint8_t>() % 5);
+
+  while (reader.MoreToRead()) {
+    uint16_t first_seq_num = reader.GetNum<uint16_t>();
+    uint16_t last_seq_num = reader.GetNum<uint16_t>();
+    bool marker_bit = reader.GetNum<uint8_t>();
+
+    RTPVideoHeader video_header;
+    switch (reader.GetNum<uint8_t>() % 3) {
+      case 0:
+        video_header.frame_type = VideoFrameType::kEmptyFrame;
+        break;
+      case 1:
+        video_header.frame_type = VideoFrameType::kVideoFrameKey;
+        break;
+      case 2:
+        video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+        break;
+    }
+
+    switch (codec) {
+      case kVideoCodecVP8:
+        reader.CopyTo(
+            &video_header.video_type_header.emplace<RTPVideoHeaderVP8>());
+        break;
+      case kVideoCodecVP9:
+        reader.CopyTo(
+            &video_header.video_type_header.emplace<RTPVideoHeaderVP9>());
+        break;
+      case kVideoCodecH264:
+        reader.CopyTo(
+            &video_header.video_type_header.emplace<RTPVideoHeaderH264>());
+        break;
+      default:
+        break;
+    }
+
+    video_header.generic = GenerateGenericFrameDependencies(&reader);
+
+    // clang-format off
+    auto frame = std::make_unique<RtpFrameObject>(
+        first_seq_num,
+        last_seq_num,
+        marker_bit,
+        /*times_nacked=*/0,
+        /*first_packet_received_time=*/0,
+        /*last_packet_received_time=*/0,
+        /*rtp_timestamp=*/0,
+        /*ntp_time_ms=*/0,
+        VideoSendTiming(),
+        /*payload_type=*/0,
+        codec,
+        kVideoRotation_0,
+        VideoContentType::UNSPECIFIED,
+        video_header,
+        /*color_space=*/absl::nullopt,
+        RtpPacketInfos(),
+        EncodedImageBuffer::Create(/*size=*/0));
+    // clang-format on
+
+    reference_finder.ManageFrame(std::move(frame));
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_packet_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_packet_fuzzer.cc
new file mode 100644
index 0000000000..5d117529bb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <bitset>
+#include <cstring>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h"
+
+namespace webrtc {
+// We decide which header extensions to register by reading four bytes
+// from the beginning of `data` and interpreting it as a bitmask over
+// the RTPExtensionType enum. This assert ensures four bytes are enough.
+static_assert(kRtpExtensionNumberOfExtensions <= 32,
+              "Insufficient bits read to configure all header extensions. Add "
+              "an extra byte and update the switches.");
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  if (size <= 4)
+    return;
+
+  // Don't use the configuration bytes as part of the packet. Copy them out
+  // with memcpy: dereferencing a casted pointer would be an unaligned,
+  // aliasing-unsafe load.
+  uint32_t extension_mask_bits;
+  std::memcpy(&extension_mask_bits, data, sizeof(extension_mask_bits));
+  std::bitset<32> extensionMask(extension_mask_bits);
+  data += 4;
+  size -= 4;
+
+  RtpPacketReceived::ExtensionManager extensions(/*extmap_allow_mixed=*/true);
+  // Start at local_id = 1 since 0 is an invalid extension id.
+  int local_id = 1;
+  // Skip i = 0 since it maps to kRtpExtensionNone.
+  for (int i = 1; i < kRtpExtensionNumberOfExtensions; i++) {
+    RTPExtensionType extension_type = static_cast<RTPExtensionType>(i);
+    if (extensionMask[i]) {
+      // Extensions are registered with an ID, which you signal to the
+      // peer so they know what to expect. This code only cares about
+      // parsing so the value of the ID isn't relevant.
+      extensions.RegisterByType(local_id++, extension_type);
+    }
+  }
+
+  RtpPacketReceived packet(&extensions);
+  packet.Parse(data, size);
+
+  // Call packet accessors because they have extra checks.
+  packet.Marker();
+  packet.PayloadType();
+  packet.SequenceNumber();
+  packet.Timestamp();
+  packet.Ssrc();
+  packet.Csrcs();
+
+  // Each extension has its own getter. It is supported behaviour to
+  // call GetExtension on an extension which was not registered, so we
+  // don't check the bitmask here.
+  for (int i = 0; i < kRtpExtensionNumberOfExtensions; i++) {
+    switch (static_cast<RTPExtensionType>(i)) {
+      case kRtpExtensionNone:
+      case kRtpExtensionNumberOfExtensions:
+        break;
+      case kRtpExtensionTransmissionTimeOffset:
+        int32_t offset;
+        packet.GetExtension<TransmissionOffset>(&offset);
+        break;
+      case kRtpExtensionAudioLevel:
+        bool voice_activity;
+        uint8_t audio_level;
+        packet.GetExtension<AudioLevel>(&voice_activity, &audio_level);
+        break;
+#if !defined(WEBRTC_MOZILLA_BUILD)
+      case kRtpExtensionCsrcAudioLevel: {
+        std::vector<uint8_t> audio_levels;
+        packet.GetExtension<CsrcAudioLevel>(&audio_levels);
+        break;
+      }
+#endif
+      case kRtpExtensionAbsoluteSendTime:
+        uint32_t sendtime;
+        packet.GetExtension<AbsoluteSendTime>(&sendtime);
+        break;
+      case kRtpExtensionAbsoluteCaptureTime: {
+        AbsoluteCaptureTime extension;
+        packet.GetExtension<AbsoluteCaptureTimeExtension>(&extension);
+        break;
+      }
+      case kRtpExtensionVideoRotation:
+        uint8_t rotation;
+        packet.GetExtension<VideoOrientation>(&rotation);
+        break;
+      case kRtpExtensionTransportSequenceNumber:
+        uint16_t seqnum;
+        packet.GetExtension<TransportSequenceNumber>(&seqnum);
+        break;
+      case kRtpExtensionTransportSequenceNumber02: {
+        uint16_t seqnum;
+        absl::optional<FeedbackRequest> feedback_request;
+        packet.GetExtension<TransportSequenceNumberV2>(&seqnum,
+                                                       &feedback_request);
+        break;
+      }
+      case kRtpExtensionPlayoutDelay: {
+        VideoPlayoutDelay playout;
+        packet.GetExtension<PlayoutDelayLimits>(&playout);
+        break;
+      }
+      case kRtpExtensionVideoContentType:
+        VideoContentType content_type;
+        packet.GetExtension<VideoContentTypeExtension>(&content_type);
+        break;
+      case kRtpExtensionVideoTiming: {
+        VideoSendTiming timing;
+        packet.GetExtension<VideoTimingExtension>(&timing);
+        break;
+      }
+      case kRtpExtensionRtpStreamId: {
+        std::string rsid;
+        packet.GetExtension<RtpStreamId>(&rsid);
+        break;
+      }
+      case kRtpExtensionRepairedRtpStreamId: {
+        std::string rsid;
+        packet.GetExtension<RepairedRtpStreamId>(&rsid);
+        break;
+      }
+      case kRtpExtensionMid: {
+        std::string mid;
+        packet.GetExtension<RtpMid>(&mid);
+        break;
+      }
+      case kRtpExtensionGenericFrameDescriptor: {
+        RtpGenericFrameDescriptor descriptor;
+        packet.GetExtension<RtpGenericFrameDescriptorExtension00>(&descriptor);
+        break;
+      }
+      case kRtpExtensionColorSpace: {
+        ColorSpace color_space;
+        packet.GetExtension<ColorSpaceExtension>(&color_space);
+        break;
+      }
+      case kRtpExtensionInbandComfortNoise: {
+        absl::optional<uint8_t> noise_level;
+        packet.GetExtension<InbandComfortNoiseExtension>(&noise_level);
+        break;
+      }
+      case kRtpExtensionVideoLayersAllocation: {
+        VideoLayersAllocation allocation;
+        packet.GetExtension<RtpVideoLayersAllocationExtension>(&allocation);
+        break;
+      }
+      case kRtpExtensionVideoFrameTrackingId: {
+        uint16_t tracking_id;
+        packet.GetExtension<VideoFrameTrackingIdExtension>(&tracking_id);
+        break;
+      }
+      case kRtpExtensionDependencyDescriptor:
+        // This extension requires state to read and so complicated that
+        // deserves own fuzzer.
+        break;
+#if defined(WEBRTC_MOZILLA_BUILD)
+      case kRtpExtensionCsrcAudioLevel: {
+        CsrcAudioLevelList levels;
+        packet.GetExtension<CsrcAudioLevel>(&levels);
+        break;
+      }
+#endif
+    }
+  }
+
+  // Check that zero-ing mutable extensions wouldn't cause any problems.
+  packet.ZeroMutableExtensions();
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_packetizer_av1_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_packetizer_av1_fuzzer.cc
new file mode 100644
index 0000000000..e5550c1279
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_packetizer_av1_fuzzer.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/video/video_frame_type.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_packetizer_av1.h"
+#include "rtc_base/checks.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  test::FuzzDataHelper input(rtc::MakeArrayView(data, size));
+
+  RtpPacketizer::PayloadSizeLimits limits;
+  limits.max_payload_len = 1200;
+  // The reduction lengths are read as uint8_t so they stay far below
+  // max_payload_len, which keeps the limits structure valid.
+  limits.first_packet_reduction_len = input.ReadOrDefaultValue<uint8_t>(0);
+  limits.last_packet_reduction_len = input.ReadOrDefaultValue<uint8_t>(0);
+  limits.single_packet_reduction_len =
+      input.ReadOrDefaultValue<uint8_t>(0);
+  const VideoFrameType kFrameTypes[] = {VideoFrameType::kVideoFrameKey,
+                                        VideoFrameType::kVideoFrameDelta};
+  VideoFrameType frame_type = input.SelectOneOf(kFrameTypes);
+
+  // The RtpPacketizerAv1 constructor is the main code under test.
+  RtpPacketizerAv1 packetizer(input.ReadByteArray(input.BytesLeft()),
+                              limits, frame_type,
+                              /*is_last_frame_in_picture=*/true);
+
+  size_t packet_count = packetizer.NumPackets();
+  if (packet_count == 0) {
+    return;
+  }
+  // Packetization succeeded, so also exercise NextPacket and verify every
+  // produced packet honors the configured payload size limits.
+  RtpPacketToSend packet(nullptr);
+  // Single packet.
+  if (packet_count == 1) {
+    RTC_CHECK(packetizer.NextPacket(&packet));
+    RTC_CHECK_LE(packet.payload_size(),
+                 limits.max_payload_len - limits.single_packet_reduction_len);
+    return;
+  }
+  // First packet.
+  RTC_CHECK(packetizer.NextPacket(&packet));
+  RTC_CHECK_LE(packet.payload_size(),
+               limits.max_payload_len - limits.first_packet_reduction_len);
+  // Middle packets.
+  for (size_t i = 1; i < packet_count - 1; ++i) {
+    RTC_CHECK(packetizer.NextPacket(&packet))
+        << "Failed to get packet#" << i;
+    RTC_CHECK_LE(packet.payload_size(), limits.max_payload_len)
+        << "Packet #" << i << " exceeds it's limit";
+  }
+  // Last packet.
+  RTC_CHECK(packetizer.NextPacket(&packet));
+  RTC_CHECK_LE(packet.payload_size(),
+               limits.max_payload_len - limits.last_packet_reduction_len);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_video_frame_assembler_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_video_frame_assembler_fuzzer.cc
new file mode 100644
index 0000000000..6ab6b9a905
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_video_frame_assembler_fuzzer.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <cstddef>
+#include <cstdint>
+
+#include "api/video/rtp_video_frame_assembler.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  if (size == 0) {
+    return;
+  }
+  RtpHeaderExtensionMap extensions;
+  extensions.Register<RtpDependencyDescriptorExtension>(1);
+  extensions.Register<RtpGenericFrameDescriptorExtension00>(2);
+  RtpPacketReceived packet(&extensions);
+
+  // The first input byte selects the payload format the assembler uses.
+  RtpVideoFrameAssembler assembler(
+      static_cast<RtpVideoFrameAssembler::PayloadFormat>(data[0] % 6));
+
+  for (size_t offset = 1; offset < size;) {
+    size_t chunk_size = std::min<size_t>(size - offset, 300);
+    if (packet.Parse(data + offset, chunk_size)) {
+      assembler.InsertPacket(packet);
+    }
+    offset += chunk_size;
+  }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc
new file mode 100644
index 0000000000..ae8b8728fb
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstddef>
+#include <cstdint>
+#include <limits>
+
+#include "api/array_view.h"
+#include "api/video/video_layers_allocation.h"
+#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  // Video layers allocation is an rtp header extension. Per
+  // https://datatracker.ietf.org/doc/html/rfc8285#section-4.3 a header
+  // extension encodes its length in at most one byte, so no valid
+  // extension can exceed 255 bytes.
+  constexpr int kMaxSize = std::numeric_limits<uint8_t>::max();
+  if (size > kMaxSize) {
+    return;
+  }
+  auto input = rtc::MakeArrayView(data, size);
+
+  VideoLayersAllocation allocation1;
+  if (!RtpVideoLayersAllocationExtension::Parse(input, &allocation1)) {
+    // Malformed buffer; nothing more to verify.
+    return;
+  }
+
+  // Serialize the parsed allocation back into a fresh buffer.
+  size_t encoded_size = RtpVideoLayersAllocationExtension::ValueSize(allocation1);
+  // The writer must not need more bytes than the reader consumed, i.e. the
+  // encoding is minimal.
+  RTC_CHECK_LE(encoded_size, input.size());
+  uint8_t scratch[kMaxSize];
+  RTC_CHECK_LE(encoded_size, kMaxSize);
+  rtc::ArrayView<uint8_t> encoded(scratch, encoded_size);
+  RTC_CHECK(
+      RtpVideoLayersAllocationExtension::Write(encoded, allocation1));
+
+  // Reparse the writer's output; unlike the random input this must
+  // always succeed.
+  VideoLayersAllocation allocation2;
+  RTC_CHECK(
+      RtpVideoLayersAllocationExtension::Parse(encoded, &allocation2));
+
+  RTC_CHECK_EQ(allocation1.rtp_stream_index, allocation2.rtp_stream_index);
+  RTC_CHECK_EQ(allocation1.resolution_and_frame_rate_is_valid,
+               allocation2.resolution_and_frame_rate_is_valid);
+  RTC_CHECK_EQ(allocation1.active_spatial_layers.size(),
+               allocation2.active_spatial_layers.size());
+  RTC_CHECK(allocation1 == allocation2);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/sctp_utils_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/sctp_utils_fuzzer.cc
new file mode 100644
index 0000000000..249707514e
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/sctp_utils_fuzzer.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "api/data_channel_interface.h"
+#include "pc/sctp_utils.h"
+#include "rtc_base/copy_on_write_buffer.h"
+
+namespace webrtc {
+
+// Runs every sctp message parser over the same arbitrary buffer.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  rtc::CopyOnWriteBuffer buffer(data, size);
+  std::string channel_label;
+  DataChannelInit channel_config;
+  IsOpenMessage(buffer);
+  ParseDataChannelOpenMessage(buffer, &channel_label, &channel_config);
+  ParseDataChannelOpenAckMessage(buffer);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/sdp_integration_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/sdp_integration_fuzzer.cc
new file mode 100644
index 0000000000..ece4b50505
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/sdp_integration_fuzzer.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "absl/strings/string_view.h"
+#include "pc/test/integration_test_helpers.h"
+
+namespace webrtc {
+
+class FuzzerTest : public PeerConnectionIntegrationBaseTest {
+ public:
+  FuzzerTest()
+      : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {}
+
+  void RunNegotiateCycle(absl::string_view message) {
+    CreatePeerConnectionWrappers();
+    // Deliberately no test.ConnectFakeSignaling(); every signal that is
+    // generated gets dropped on the floor.
+
+    auto remote_observer =
+        rtc::make_ref_counted<FakeSetRemoteDescriptionObserver>();
+
+    SdpParseError error;
+    std::unique_ptr<SessionDescriptionInterface> offer(
+        CreateSessionDescription("offer", std::string(message), &error));
+    caller()->pc()->SetRemoteDescription(std::move(offer), remote_observer);
+    // Give the observer a short window to fire; the timeout is kept small
+    // so the fuzzer can explore many branches quickly.
+    EXPECT_TRUE_WAIT(remote_observer->called(), 100);
+
+    // When set-remote-description succeeded, attempt an answer as well.
+    auto local_observer =
+        rtc::make_ref_counted<FakeSetLocalDescriptionObserver>();
+    if (remote_observer->error().ok()) {
+      caller()->pc()->SetLocalDescription(local_observer);
+      EXPECT_TRUE_WAIT(local_observer->called(), 100);
+    }
+    // Abort immediately on any EXPECT failure.
+    RTC_CHECK(!HasFailure());
+  }
+
+  // This class bypasses the usual test definition macros, so TestBody()
+  // must be defined by hand even though it is never needed.
+  void TestBody() override {}
+};
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  if (size > 16384) {
+    return;
+  }
+
+  FuzzerTest test;
+  test.RunNegotiateCycle(
+      absl::string_view(reinterpret_cast<const char*>(data), size));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/sdp_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/sdp_parser_fuzzer.cc
new file mode 100644
index 0000000000..c85eab4047
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/sdp_parser_fuzzer.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/jsep_session_description.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  if (size > 16384) {  // Oversized inputs only slow the fuzzer down.
+    return;
+  }
+  std::string sdp_text(reinterpret_cast<const char*>(data), size);
+  webrtc::SdpParseError parse_error;
+
+  std::unique_ptr<webrtc::SessionDescriptionInterface> description(
+      CreateSessionDescription("offer", sdp_text, &parse_error));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/ssl_certificate_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/ssl_certificate_fuzzer.cc
new file mode 100644
index 0000000000..4bab5c8f02
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/ssl_certificate_fuzzer.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "rtc_base/message_digest.h"
+#include "rtc_base/ssl_certificate.h"
+#include "rtc_base/string_encode.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  std::string pem(reinterpret_cast<const char*>(data), size);
+
+  std::unique_ptr<rtc::SSLCertificate> certificate =
+      rtc::SSLCertificate::FromPEMString(pem);
+
+  if (certificate == nullptr) {
+    return;
+  }
+
+  certificate->Clone();  // Exercise every accessor of a parsed certificate.
+  certificate->GetStats();
+  certificate->ToPEMString();
+  certificate->CertificateExpirationTime();
+
+  std::string digest_algorithm;
+  certificate->GetSignatureDigestAlgorithm(&digest_algorithm);
+
+  unsigned char digest[rtc::MessageDigest::kMaxSize];
+  size_t digest_len;
+  certificate->ComputeDigest(digest_algorithm, digest,
+                             rtc::MessageDigest::kMaxSize, &digest_len);
+
+  rtc::Buffer der;
+  certificate->ToDER(&der);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/string_to_number_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/string_to_number_fuzzer.cc
new file mode 100644
index 0000000000..28b36a73ce
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/string_to_number_fuzzer.cc
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "rtc_base/string_to_number.h"
+
+namespace webrtc {
+
+// Runs StringToNumber over the same input for every supported target type.
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  std::string input(reinterpret_cast<const char*>(data), size);
+  rtc::StringToNumber<int8_t>(input);
+  rtc::StringToNumber<int16_t>(input);
+  rtc::StringToNumber<int32_t>(input);
+  rtc::StringToNumber<int64_t>(input);
+  rtc::StringToNumber<uint8_t>(input);
+  rtc::StringToNumber<uint16_t>(input);
+  rtc::StringToNumber<uint32_t>(input);
+  rtc::StringToNumber<uint64_t>(input);
+  rtc::StringToNumber<float>(input);
+  rtc::StringToNumber<double>(input);
+  rtc::StringToNumber<long double>(input);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/stun_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/stun_parser_fuzzer.cc
new file mode 100644
index 0000000000..6ca9eac8b2
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/stun_parser_fuzzer.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/transport/stun.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+  const char* bytes = reinterpret_cast<const char*>(data);
+
+  // Integrity is normally verified first, but those checks are fuzzed
+  // separately in stun_validator_fuzzer.cc. This target is still worth
+  // fuzzing, since a malicious adversary receiving a call could forge
+  // the integrity checks.
+  cricket::IceMessage ice_message;
+  rtc::ByteBufferReader buffer(bytes, size);
+  ice_message.Read(&buffer);
+  ice_message.ValidateMessageIntegrity("");
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/stun_validator_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/stun_validator_fuzzer.cc
new file mode 100644
index 0000000000..421638db1b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/stun_validator_fuzzer.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/transport/stun.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ const char* message = reinterpret_cast<const char*>(data);
+
+ cricket::StunMessage::ValidateFingerprint(message, size);
+ cricket::StunMessage::ValidateMessageIntegrityForTesting(message, size, "");
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/turn_unwrap_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/turn_unwrap_fuzzer.cc
new file mode 100644
index 0000000000..47ee7fd205
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/turn_unwrap_fuzzer.cc
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "media/base/turn_utils.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ size_t content_position;
+ size_t content_size;
+ cricket::UnwrapTurnPacket(data, size, &content_position, &content_size);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/ulpfec_generator_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/ulpfec_generator_fuzzer.cc
new file mode 100644
index 0000000000..43d9450918
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/ulpfec_generator_fuzzer.cc
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "modules/include/module_common_types_public.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+#include "modules/rtp_rtcp/source/ulpfec_generator.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+namespace {
+constexpr uint8_t kFecPayloadType = 96;
+constexpr uint8_t kRedPayloadType = 97;
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ SimulatedClock clock(1);
+ UlpfecGenerator generator(kRedPayloadType, kFecPayloadType, &clock);
+ size_t i = 0;
+ if (size < 4)
+ return;
+ FecProtectionParams params = {
+ data[i++] % 128, static_cast<int>(data[i++] % 10), kFecMaskBursty};
+ generator.SetProtectionParameters(params, params);
+ uint16_t seq_num = data[i++];
+ uint16_t prev_seq_num = 0;
+ while (i + 3 < size) {
+ size_t rtp_header_length = data[i++] % 10 + 12;
+ size_t payload_size = data[i++] % 10;
+ if (i + payload_size + rtp_header_length + 2 > size)
+ break;
+ rtc::CopyOnWriteBuffer packet(&data[i], payload_size + rtp_header_length);
+ packet.EnsureCapacity(IP_PACKET_SIZE);
+ // Write a valid parsable header (version = 2, no padding, no extensions,
+ // no CSRCs).
+ ByteWriter<uint8_t>::WriteBigEndian(packet.MutableData(), 2 << 6);
+ // Make sure sequence numbers are increasing.
+ ByteWriter<uint16_t>::WriteBigEndian(packet.MutableData() + 2, seq_num++);
+ i += payload_size + rtp_header_length;
+ const bool protect = data[i++] % 2 == 1;
+
+ // Check the sequence numbers are monotonic. In rare case the packets number
+ // may loop around and in the same FEC-protected group the packet sequence
+ // number became out of order.
+ if (protect && IsNewerSequenceNumber(seq_num, prev_seq_num) &&
+ seq_num < prev_seq_num + kUlpfecMaxMediaPackets) {
+ RtpPacketToSend rtp_packet(nullptr);
+ // Check that we actually have a parsable packet, we want to fuzz FEC
+ // logic, not RTP header parsing.
+ RTC_CHECK(rtp_packet.Parse(packet));
+ generator.AddPacketAndGenerateFec(rtp_packet);
+ prev_seq_num = seq_num;
+ }
+
+ generator.GetFecPackets();
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/ulpfec_header_reader_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/ulpfec_header_reader_fuzzer.cc
new file mode 100644
index 0000000000..243cb4ed70
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/ulpfec_header_reader_fuzzer.cc
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/ulpfec_header_reader_writer.h"
+
+namespace webrtc {
+
+using Packet = ForwardErrorCorrection::Packet;
+using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket;
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ ReceivedFecPacket packet;
+ packet.pkt = rtc::scoped_refptr<Packet>(new Packet());
+ const size_t packet_size =
+ std::min(size, static_cast<size_t>(IP_PACKET_SIZE));
+ packet.pkt->data.SetSize(packet_size);
+ packet.pkt->data.EnsureCapacity(IP_PACKET_SIZE);
+ memcpy(packet.pkt->data.MutableData(), data, packet_size);
+
+ UlpfecHeaderReader ulpfec_reader;
+ ulpfec_reader.ReadFecHeader(&packet);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/ulpfec_receiver_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/ulpfec_receiver_fuzzer.cc
new file mode 100644
index 0000000000..0a29ba3259
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/ulpfec_receiver_fuzzer.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/ulpfec_receiver.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+namespace webrtc {
+
+namespace {
+class DummyCallback : public RecoveredPacketReceiver {
+ void OnRecoveredPacket(const RtpPacketReceived& packet) override {}
+};
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ constexpr size_t kMinDataNeeded = 12;
+ if (size < kMinDataNeeded || size > 2000) {
+ return;
+ }
+
+ uint32_t ulpfec_ssrc = ByteReader<uint32_t>::ReadLittleEndian(data + 0);
+ uint16_t ulpfec_seq_num = ByteReader<uint16_t>::ReadLittleEndian(data + 4);
+ uint32_t media_ssrc = ByteReader<uint32_t>::ReadLittleEndian(data + 6);
+ uint16_t media_seq_num = ByteReader<uint16_t>::ReadLittleEndian(data + 10);
+
+ DummyCallback callback;
+ UlpfecReceiver receiver(ulpfec_ssrc, 0, &callback, Clock::GetRealTimeClock());
+
+ test::FuzzDataHelper fuzz_data(rtc::MakeArrayView(data, size));
+ while (fuzz_data.CanReadBytes(kMinDataNeeded)) {
+ size_t packet_length = kRtpHeaderSize + fuzz_data.Read<uint8_t>();
+ auto raw_packet = fuzz_data.ReadByteArray(packet_length);
+
+ RtpPacketReceived parsed_packet;
+ if (!parsed_packet.Parse(raw_packet))
+ continue;
+
+ // Overwrite the fields for the sequence number and SSRC with
+ // consistent values for either a received UlpFEC packet or received media
+ // packet. (We're still relying on libfuzzer to manage to generate packet
+ // headers that interact together; this just ensures that we have two
+ // consistent streams).
+ if (fuzz_data.ReadOrDefaultValue<uint8_t>(0) % 2 == 0) {
+ // Simulate UlpFEC packet.
+ parsed_packet.SetSequenceNumber(ulpfec_seq_num++);
+ parsed_packet.SetSsrc(ulpfec_ssrc);
+ } else {
+ // Simulate media packet.
+ parsed_packet.SetSequenceNumber(media_seq_num++);
+ parsed_packet.SetSsrc(media_ssrc);
+ }
+
+ receiver.AddReceivedRedPacket(parsed_packet);
+ }
+
+ receiver.ProcessReceivedFec();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/utils/BUILD.gn b/third_party/libwebrtc/test/fuzzers/utils/BUILD.gn
new file mode 100644
index 0000000000..c5744fc33b
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/utils/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("rtp_replayer") {
+ testonly = true
+ sources = [
+ "rtp_replayer.cc",
+ "rtp_replayer.h",
+ ]
+ deps = [
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/test/video:function_video_factory",
+ "../../../api/transport:field_trial_based_config",
+ "../../../api/units:timestamp",
+ "../../../api/video_codecs:video_codecs_api",
+ "../../../call",
+ "../../../call:call_interfaces",
+ "../../../common_video",
+ "../../../media:rtc_internal_video_codecs",
+ "../../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_tests_utils",
+ "../../../rtc_base:rtc_json",
+ "../../../rtc_base:timeutils",
+ "../../../system_wrappers",
+ "../../../test:call_config_utils",
+ "../../../test:encoder_settings",
+ "../../../test:fake_video_codecs",
+ "../../../test:null_transport",
+ "../../../test:rtp_test_utils",
+ "../../../test:run_loop",
+ "../../../test:run_test",
+ "../../../test:run_test_interface",
+ "../../../test:test_renderer",
+ "../../../test:test_support",
+ "../../../test:video_test_common",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory:memory" ]
+}
diff --git a/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc
new file mode 100644
index 0000000000..12743d89d9
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/fuzzers/utils/rtp_replayer.h"
+
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/units/timestamp.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/strings/json.h"
+#include "system_wrappers/include/clock.h"
+#include "test/call_config_utils.h"
+#include "test/encoder_settings.h"
+#include "test/fake_decoder.h"
+#include "test/rtp_file_reader.h"
+#include "test/run_loop.h"
+
+namespace webrtc {
+namespace test {
+
+void RtpReplayer::Replay(const std::string& replay_config_filepath,
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size) {
+ auto stream_state = std::make_unique<StreamState>();
+ std::vector<VideoReceiveStreamInterface::Config> receive_stream_configs =
+ ReadConfigFromFile(replay_config_filepath, &(stream_state->transport));
+ return Replay(std::move(stream_state), std::move(receive_stream_configs),
+ rtp_dump_data, rtp_dump_size);
+}
+
+void RtpReplayer::Replay(
+ std::unique_ptr<StreamState> stream_state,
+ std::vector<VideoReceiveStreamInterface::Config> receive_stream_configs,
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size) {
+ RunLoop loop;
+ rtc::ScopedBaseFakeClock fake_clock;
+
+ // Work around: webrtc calls webrtc::Random(clock.TimeInMicroseconds())
+ // everywhere and Random expects non-zero seed. Let's set the clock non-zero
+ // to make them happy.
+ fake_clock.SetTime(webrtc::Timestamp::Millis(1));
+
+ // Attempt to create an RtpReader from the input file.
+ auto rtp_reader = CreateRtpReader(rtp_dump_data, rtp_dump_size);
+ if (rtp_reader == nullptr) {
+ RTC_LOG(LS_ERROR) << "Failed to create the rtp_reader";
+ return;
+ }
+
+ RtpHeaderExtensionMap extensions(/*extmap_allow_mixed=*/true);
+ // Skip i = 0 since it maps to kRtpExtensionNone.
+ for (int i = 1; i < kRtpExtensionNumberOfExtensions; i++) {
+ RTPExtensionType extension_type = static_cast<RTPExtensionType>(i);
+ // Extensions are registered with an ID, which you signal to the
+ // peer so they know what to expect. This code only cares about
+ // parsing so the value of the ID isn't relevant.
+ extensions.RegisterByType(i, extension_type);
+ }
+
+ // Setup the video streams based on the configuration.
+ webrtc::RtcEventLogNull event_log;
+ std::unique_ptr<TaskQueueFactory> task_queue_factory =
+ CreateDefaultTaskQueueFactory();
+ Call::Config call_config(&event_log);
+ call_config.task_queue_factory = task_queue_factory.get();
+ FieldTrialBasedConfig field_trials;
+ call_config.trials = &field_trials;
+ std::unique_ptr<Call> call(Call::Create(call_config));
+ SetupVideoStreams(&receive_stream_configs, stream_state.get(), call.get());
+
+ // Start replaying the provided stream now that it has been configured.
+ for (const auto& receive_stream : stream_state->receive_streams) {
+ receive_stream->Start();
+ }
+
+ ReplayPackets(&fake_clock, call.get(), rtp_reader.get(), extensions);
+
+ for (const auto& receive_stream : stream_state->receive_streams) {
+ call->DestroyVideoReceiveStream(receive_stream);
+ }
+}
+
+std::vector<VideoReceiveStreamInterface::Config>
+RtpReplayer::ReadConfigFromFile(const std::string& replay_config,
+ Transport* transport) {
+ Json::CharReaderBuilder factory;
+ std::unique_ptr<Json::CharReader> json_reader =
+ absl::WrapUnique(factory.newCharReader());
+ Json::Value json_configs;
+ Json::String errors;
+ if (!json_reader->parse(replay_config.data(),
+ replay_config.data() + replay_config.length(),
+ &json_configs, &errors)) {
+ RTC_LOG(LS_ERROR)
+ << "Error parsing JSON replay configuration for the fuzzer: " << errors;
+ return {};
+ }
+
+ std::vector<VideoReceiveStreamInterface::Config> receive_stream_configs;
+ receive_stream_configs.reserve(json_configs.size());
+ for (const auto& json : json_configs) {
+ receive_stream_configs.push_back(
+ ParseVideoReceiveStreamJsonConfig(transport, json));
+ }
+ return receive_stream_configs;
+}
+
+void RtpReplayer::SetupVideoStreams(
+ std::vector<VideoReceiveStreamInterface::Config>* receive_stream_configs,
+ StreamState* stream_state,
+ Call* call) {
+ stream_state->decoder_factory = std::make_unique<InternalDecoderFactory>();
+ for (auto& receive_config : *receive_stream_configs) {
+ // Attach the decoder for the corresponding payload type in the config.
+ for (auto& decoder : receive_config.decoders) {
+ decoder = test::CreateMatchingDecoder(decoder.payload_type,
+ decoder.video_format.name);
+ }
+
+ // Create the window to display the rendered video.
+ stream_state->sinks.emplace_back(
+ test::VideoRenderer::Create("Fuzzing WebRTC Video Config", 640, 480));
+ // Create a receive stream for this config.
+ receive_config.renderer = stream_state->sinks.back().get();
+ receive_config.decoder_factory = stream_state->decoder_factory.get();
+ stream_state->receive_streams.emplace_back(
+ call->CreateVideoReceiveStream(std::move(receive_config)));
+ }
+}
+
+std::unique_ptr<test::RtpFileReader> RtpReplayer::CreateRtpReader(
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size) {
+ std::unique_ptr<test::RtpFileReader> rtp_reader(test::RtpFileReader::Create(
+ test::RtpFileReader::kRtpDump, rtp_dump_data, rtp_dump_size, {}));
+ if (!rtp_reader) {
+ RTC_LOG(LS_ERROR) << "Unable to open input file with any supported format";
+ return nullptr;
+ }
+ return rtp_reader;
+}
+
+void RtpReplayer::ReplayPackets(
+ rtc::FakeClock* clock,
+ Call* call,
+ test::RtpFileReader* rtp_reader,
+ const RtpPacketReceived::ExtensionManager& extensions) {
+ int64_t replay_start_ms = -1;
+
+ while (true) {
+ int64_t now_ms = rtc::TimeMillis();
+ if (replay_start_ms == -1) {
+ replay_start_ms = now_ms;
+ }
+
+ test::RtpPacket packet;
+ if (!rtp_reader->NextPacket(&packet)) {
+ break;
+ }
+
+ int64_t deliver_in_ms = replay_start_ms + packet.time_ms - now_ms;
+ if (deliver_in_ms > 0) {
+ // StatsCounter::ReportMetricToAggregatedCounter is O(elapsed time).
+ // Set an upper limit to prevent waste time.
+ clock->AdvanceTime(webrtc::TimeDelta::Millis(
+ std::min(deliver_in_ms, static_cast<int64_t>(100))));
+ }
+
+ RtpPacketReceived received_packet(
+ &extensions, Timestamp::Micros(clock->TimeNanos() / 1000));
+ if (!received_packet.Parse(packet.data, packet.length)) {
+ RTC_LOG(LS_ERROR) << "Packet error, corrupt packets or incorrect setup?";
+ break;
+ }
+
+ call->Receiver()->DeliverRtpPacket(
+ MediaType::VIDEO, std::move(received_packet),
+ [&](const RtpPacketReceived& parsed_packet) {
+ RTC_LOG(LS_ERROR) << "Unknown SSRC: " << parsed_packet.Ssrc();
+ return false;
+ });
+ }
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.h b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.h
new file mode 100644
index 0000000000..ae94a640a5
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FUZZERS_UTILS_RTP_REPLAYER_H_
+#define TEST_FUZZERS_UTILS_RTP_REPLAYER_H_
+
+#include <stdio.h>
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/test/video/function_video_decoder_factory.h"
+#include "api/video_codecs/video_decoder.h"
+#include "call/call.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/time_utils.h"
+#include "test/null_transport.h"
+#include "test/rtp_file_reader.h"
+#include "test/test_video_capturer.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+// The RtpReplayer is a utility for fuzzing the RTP/RTCP receiver stack in
+// WebRTC. It achieves this by accepting a set of Receiver configurations and
+// an RtpDump (consisting of both RTP and RTCP packets). The `rtp_dump` is
+// passed in as a buffer to allow simple mutation fuzzing directly on the dump.
+class RtpReplayer final {
+ public:
+ // Holds all the important stream information required to emulate the WebRTC
+ // rtp receival code path.
+ struct StreamState {
+ test::NullTransport transport;
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
+ std::vector<VideoReceiveStreamInterface*> receive_streams;
+ std::unique_ptr<VideoDecoderFactory> decoder_factory;
+ };
+
+ // Construct an RtpReplayer from a JSON replay configuration file.
+ static void Replay(const std::string& replay_config_filepath,
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size);
+
+ // Construct an RtpReplayer from a set of
+ // VideoReceiveStreamInterface::Configs. Note the stream_state.transport must
+ // be set for each receiver stream.
+ static void Replay(
+ std::unique_ptr<StreamState> stream_state,
+ std::vector<VideoReceiveStreamInterface::Config> receive_stream_config,
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size);
+
+ private:
+ // Reads the replay configuration from Json.
+ static std::vector<VideoReceiveStreamInterface::Config> ReadConfigFromFile(
+ const std::string& replay_config,
+ Transport* transport);
+
+ // Configures the stream state based on the receiver configurations.
+ static void SetupVideoStreams(
+ std::vector<VideoReceiveStreamInterface::Config>* receive_stream_configs,
+ StreamState* stream_state,
+ Call* call);
+
+ // Creates a new RtpReader which can read the RtpDump
+ static std::unique_ptr<test::RtpFileReader> CreateRtpReader(
+ const uint8_t* rtp_dump_data,
+ size_t rtp_dump_size);
+
+ // Replays each packet to from the RtpDump.
+ static void ReplayPackets(rtc::FakeClock* clock,
+ Call* call,
+ test::RtpFileReader* rtp_reader,
+ const RtpHeaderExtensionMap& extensions);
+}; // class RtpReplayer
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_FUZZERS_UTILS_RTP_REPLAYER_H_
diff --git a/third_party/libwebrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..1691b55cc0
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp8_depacketizer_fuzzer.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RTPVideoHeader video_header;
+ VideoRtpDepacketizerVp8::ParseRtpPayload(rtc::MakeArrayView(data, size),
+ &video_header);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc
new file mode 100644
index 0000000000..2ecfd820c8
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp8_qp_parser_fuzzer.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/utility/vp8_header_parser.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ int qp;
+ vp8::GetQp(data, size, &qp);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp8_replay_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp8_replay_fuzzer.cc
new file mode 100644
index 0000000000..819b9626f9
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp8_replay_fuzzer.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "test/fuzzers/utils/rtp_replayer.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ auto stream_state = std::make_unique<test::RtpReplayer::StreamState>();
+ VideoReceiveStreamInterface::Config vp8_config(&(stream_state->transport));
+
+ VideoReceiveStreamInterface::Decoder vp8_decoder;
+ vp8_decoder.video_format = SdpVideoFormat("VP8");
+ vp8_decoder.payload_type = 125;
+ vp8_config.decoders.push_back(std::move(vp8_decoder));
+
+ vp8_config.rtp.local_ssrc = 7731;
+ vp8_config.rtp.remote_ssrc = 1337;
+ vp8_config.rtp.rtx_ssrc = 100;
+ vp8_config.rtp.nack.rtp_history_ms = 1000;
+ vp8_config.rtp.lntf.enabled = true;
+
+ std::vector<VideoReceiveStreamInterface::Config> replay_configs;
+ replay_configs.push_back(std::move(vp8_config));
+
+ test::RtpReplayer::Replay(std::move(stream_state), std::move(replay_configs),
+ data, size);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc
new file mode 100644
index 0000000000..ae36a94931
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp9_depacketizer_fuzzer.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RTPVideoHeader video_header;
+ VideoRtpDepacketizerVp9::ParseRtpPayload(rtc::MakeArrayView(data, size),
+ &video_header);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp9_encoder_references_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp9_encoder_references_fuzzer.cc
new file mode 100644
index 0000000000..09848ae4de
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp9_encoder_references_fuzzer.cc
@@ -0,0 +1,624 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+
+#include "absl/algorithm/container.h"
+#include "absl/base/macros.h"
+#include "absl/container/inlined_vector.h"
+#include "api/array_view.h"
+#include "api/field_trials_view.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/interface/libvpx_interface.h"
+#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h"
+#include "modules/video_coding/frame_dependencies_calculator.h"
+#include "rtc_base/numerics/safe_compare.h"
+#include "test/fuzzers/fuzz_data_helper.h"
+
+// Fuzzer simulates various svc configurations and libvpx encoder dropping
+// layer frames.
+// Validates vp9 encoder wrapper produces consistent frame references.
+namespace webrtc {
+namespace {
+
+using test::FuzzDataHelper;
+
+constexpr int kBitrateEnabledBps = 100'000;
+
+class FrameValidator : public EncodedImageCallback {
+ public:
+ ~FrameValidator() override = default;
+
+ Result OnEncodedImage(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) override {
+ RTC_CHECK(codec_specific_info);
+ RTC_CHECK_EQ(codec_specific_info->codecType, kVideoCodecVP9);
+ if (codec_specific_info->codecSpecific.VP9.first_frame_in_picture) {
+ ++picture_id_;
+ }
+ int64_t frame_id = frame_id_++;
+ LayerFrame& layer_frame = frames_[frame_id % kMaxFrameHistorySize];
+ layer_frame.picture_id = picture_id_;
+ layer_frame.spatial_id = encoded_image.SpatialIndex().value_or(0);
+ layer_frame.frame_id = frame_id;
+ layer_frame.temporal_id =
+ codec_specific_info->codecSpecific.VP9.temporal_idx;
+ if (layer_frame.temporal_id == kNoTemporalIdx) {
+ layer_frame.temporal_id = 0;
+ }
+ layer_frame.vp9_non_ref_for_inter_layer_pred =
+ codec_specific_info->codecSpecific.VP9.non_ref_for_inter_layer_pred;
+ CheckVp9References(layer_frame, codec_specific_info->codecSpecific.VP9);
+
+ if (codec_specific_info->generic_frame_info.has_value()) {
+ absl::InlinedVector<int64_t, 5> frame_dependencies =
+ dependencies_calculator_.FromBuffersUsage(
+ frame_id,
+ codec_specific_info->generic_frame_info->encoder_buffers);
+
+ CheckGenericReferences(frame_dependencies,
+ *codec_specific_info->generic_frame_info);
+ CheckGenericAndCodecSpecificReferencesAreConsistent(
+ frame_dependencies, *codec_specific_info, layer_frame);
+ }
+
+ return Result(Result::OK);
+ }
+
+ private:
+ // With 4 spatial layers and patterns up to 8 pictures, it should be enough to
+ // keep the last 32 frames to validate dependencies.
+ static constexpr size_t kMaxFrameHistorySize = 32;
+ struct LayerFrame {
+ int64_t frame_id;
+ int64_t picture_id;
+ int spatial_id;
+ int temporal_id;
+ bool vp9_non_ref_for_inter_layer_pred;
+ };
+
+ void CheckVp9References(const LayerFrame& layer_frame,
+ const CodecSpecificInfoVP9& vp9_info) {
+ if (layer_frame.frame_id == 0) {
+ RTC_CHECK(!vp9_info.inter_layer_predicted);
+ } else {
+ const LayerFrame& previous_frame = Frame(layer_frame.frame_id - 1);
+ if (vp9_info.inter_layer_predicted) {
+ RTC_CHECK(!previous_frame.vp9_non_ref_for_inter_layer_pred);
+ RTC_CHECK_EQ(layer_frame.picture_id, previous_frame.picture_id);
+ }
+ if (previous_frame.picture_id == layer_frame.picture_id) {
+ RTC_CHECK_GT(layer_frame.spatial_id, previous_frame.spatial_id);
+ // The check below would fail for temporal shift structures. Remove it
+ // or move it to !flexible_mode section when vp9 encoder starts
+ // supporting such structures.
+ RTC_CHECK_EQ(layer_frame.temporal_id, previous_frame.temporal_id);
+ }
+ }
+ if (!vp9_info.flexible_mode) {
+ if (vp9_info.gof.num_frames_in_gof > 0) {
+ gof_.CopyGofInfoVP9(vp9_info.gof);
+ }
+ RTC_CHECK_EQ(gof_.temporal_idx[vp9_info.gof_idx],
+ layer_frame.temporal_id);
+ }
+ }
+
+ void CheckGenericReferences(rtc::ArrayView<const int64_t> frame_dependencies,
+ const GenericFrameInfo& generic_info) const {
+ for (int64_t dependency_frame_id : frame_dependencies) {
+ RTC_CHECK_GE(dependency_frame_id, 0);
+ const LayerFrame& dependency = Frame(dependency_frame_id);
+ RTC_CHECK_GE(generic_info.spatial_id, dependency.spatial_id);
+ RTC_CHECK_GE(generic_info.temporal_id, dependency.temporal_id);
+ }
+ }
+
+ void CheckGenericAndCodecSpecificReferencesAreConsistent(
+ rtc::ArrayView<const int64_t> frame_dependencies,
+ const CodecSpecificInfo& info,
+ const LayerFrame& layer_frame) const {
+ const CodecSpecificInfoVP9& vp9_info = info.codecSpecific.VP9;
+ const GenericFrameInfo& generic_info = *info.generic_frame_info;
+
+ RTC_CHECK_EQ(generic_info.spatial_id, layer_frame.spatial_id);
+ RTC_CHECK_EQ(generic_info.temporal_id, layer_frame.temporal_id);
+ auto picture_id_diffs =
+ rtc::MakeArrayView(vp9_info.p_diff, vp9_info.num_ref_pics);
+ RTC_CHECK_EQ(
+ frame_dependencies.size(),
+ picture_id_diffs.size() + (vp9_info.inter_layer_predicted ? 1 : 0));
+ for (int64_t dependency_frame_id : frame_dependencies) {
+ RTC_CHECK_GE(dependency_frame_id, 0);
+ const LayerFrame& dependency = Frame(dependency_frame_id);
+ if (dependency.spatial_id != layer_frame.spatial_id) {
+ RTC_CHECK(vp9_info.inter_layer_predicted);
+ RTC_CHECK_EQ(layer_frame.picture_id, dependency.picture_id);
+ RTC_CHECK_GT(layer_frame.spatial_id, dependency.spatial_id);
+ } else {
+ RTC_CHECK(vp9_info.inter_pic_predicted);
+ RTC_CHECK_EQ(layer_frame.spatial_id, dependency.spatial_id);
+ RTC_CHECK(absl::c_linear_search(
+ picture_id_diffs, layer_frame.picture_id - dependency.picture_id));
+ }
+ }
+ }
+
+ const LayerFrame& Frame(int64_t frame_id) const {
+ auto& frame = frames_[frame_id % kMaxFrameHistorySize];
+ RTC_CHECK_EQ(frame.frame_id, frame_id);
+ return frame;
+ }
+
+ GofInfoVP9 gof_;
+ int64_t frame_id_ = 0;
+ int64_t picture_id_ = 1;
+ FrameDependenciesCalculator dependencies_calculator_;
+ LayerFrame frames_[kMaxFrameHistorySize];
+};
+
+class FieldTrials : public FieldTrialsView {
+ public:
+ explicit FieldTrials(FuzzDataHelper& config)
+ : flags_(config.ReadOrDefaultValue<uint8_t>(0)) {}
+
+ ~FieldTrials() override = default;
+ std::string Lookup(absl::string_view key) const override {
+ static constexpr absl::string_view kBinaryFieldTrials[] = {
+ "WebRTC-Vp9ExternalRefCtrl",
+ "WebRTC-Vp9IssueKeyFrameOnLayerDeactivation",
+ };
+ for (size_t i = 0; i < ABSL_ARRAYSIZE(kBinaryFieldTrials); ++i) {
+ if (key == kBinaryFieldTrials[i]) {
+ return (flags_ & (1u << i)) ? "Enabled" : "Disabled";
+ }
+ }
+
+    // Ignore the following field trials.
+ if (key == "WebRTC-CongestionWindow" ||
+ key == "WebRTC-UseBaseHeavyVP8TL3RateAllocation" ||
+ key == "WebRTC-SimulcastUpswitchHysteresisPercent" ||
+ key == "WebRTC-SimulcastScreenshareUpswitchHysteresisPercent" ||
+ key == "WebRTC-VideoRateControl" ||
+ key == "WebRTC-VP9-PerformanceFlags" ||
+ key == "WebRTC-VP9VariableFramerateScreenshare" ||
+ key == "WebRTC-VP9QualityScaler") {
+ return "";
+ }
+ // Crash when using unexpected field trial to decide if it should be fuzzed
+ // or have a constant value.
+ RTC_CHECK(false) << "Unfuzzed field trial " << key << "\n";
+ }
+
+ private:
+ const uint8_t flags_;
+};
+
+VideoCodec CodecSettings(FuzzDataHelper& rng) {
+ uint16_t config = rng.ReadOrDefaultValue<uint16_t>(0);
+  // Test up to 4 spatial and 4 temporal layers.
+ int num_spatial_layers = 1 + (config & 0b11);
+ int num_temporal_layers = 1 + ((config >> 2) & 0b11);
+
+ VideoCodec codec_settings = {};
+ codec_settings.codecType = kVideoCodecVP9;
+ codec_settings.maxFramerate = 30;
+ codec_settings.width = 320 << (num_spatial_layers - 1);
+ codec_settings.height = 180 << (num_spatial_layers - 1);
+ if (num_spatial_layers > 1) {
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ SpatialLayer& spatial_layer = codec_settings.spatialLayers[sid];
+ codec_settings.width = 320 << sid;
+ codec_settings.height = 180 << sid;
+ spatial_layer.width = codec_settings.width;
+ spatial_layer.height = codec_settings.height;
+ spatial_layer.targetBitrate = kBitrateEnabledBps * num_temporal_layers;
+ spatial_layer.maxFramerate = codec_settings.maxFramerate;
+ spatial_layer.numberOfTemporalLayers = num_temporal_layers;
+ }
+ }
+ codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers;
+ codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers;
+ int inter_layer_pred = (config >> 4) & 0b11;
+ // There are only 3 valid values.
+ codec_settings.VP9()->interLayerPred = static_cast<InterLayerPredMode>(
+ inter_layer_pred < 3 ? inter_layer_pred : 0);
+ codec_settings.VP9()->flexibleMode = (config & (1u << 6)) != 0;
+ codec_settings.SetFrameDropEnabled((config & (1u << 7)) != 0);
+ codec_settings.mode = VideoCodecMode::kRealtimeVideo;
+ return codec_settings;
+}
+
+VideoEncoder::Settings EncoderSettings() {
+ return VideoEncoder::Settings(VideoEncoder::Capabilities(false),
+ /*number_of_cores=*/1,
+ /*max_payload_size=*/0);
+}
+
+bool IsSupported(int num_spatial_layers,
+ int num_temporal_layers,
+ const VideoBitrateAllocation& allocation) {
+ // VP9 encoder doesn't support certain configurations.
+ // BitrateAllocator shouldn't produce them.
+ if (allocation.get_sum_bps() == 0) {
+ // Ignore allocation that turns off all the layers.
+ // In such a case it is up to upper layer code not to call Encode.
+ return false;
+ }
+
+ for (int tid = 0; tid < num_temporal_layers; ++tid) {
+ int min_enabled_spatial_id = -1;
+ int max_enabled_spatial_id = -1;
+ int num_enabled_spatial_layers = 0;
+ for (int sid = 0; sid < num_spatial_layers; ++sid) {
+ if (allocation.GetBitrate(sid, tid) > 0) {
+ if (min_enabled_spatial_id == -1) {
+ min_enabled_spatial_id = sid;
+ }
+ max_enabled_spatial_id = sid;
+ ++num_enabled_spatial_layers;
+ }
+ }
+ if (num_enabled_spatial_layers == 0) {
+ // Each temporal layer should be enabled because skipping a full frame is
+ // not supported in non-flexible mode.
+ return false;
+ }
+ if (max_enabled_spatial_id - min_enabled_spatial_id + 1 !=
+ num_enabled_spatial_layers) {
+ // To avoid odd spatial dependencies, there should be no gaps in active
+ // spatial layers.
+ return false;
+ }
+ }
+
+ return true;
+}
+
+struct LibvpxState {
+ LibvpxState() {
+ pkt.kind = VPX_CODEC_CX_FRAME_PKT;
+ pkt.data.frame.buf = pkt_buffer;
+ pkt.data.frame.sz = ABSL_ARRAYSIZE(pkt_buffer);
+ layer_id.spatial_layer_id = -1;
+ }
+
+ uint8_t pkt_buffer[1000] = {};
+ vpx_codec_enc_cfg_t config = {};
+ vpx_codec_priv_output_cx_pkt_cb_pair_t callback = {};
+ vpx_image_t img = {};
+ vpx_svc_ref_frame_config_t ref_config = {};
+ vpx_svc_layer_id_t layer_id = {};
+ vpx_svc_frame_drop_t frame_drop = {};
+ vpx_codec_cx_pkt pkt = {};
+};
+
+class StubLibvpx : public LibvpxInterface {
+ public:
+ explicit StubLibvpx(LibvpxState* state) : state_(state) { RTC_CHECK(state_); }
+
+ vpx_codec_err_t codec_enc_config_default(vpx_codec_iface_t* iface,
+ vpx_codec_enc_cfg_t* cfg,
+ unsigned int usage) const override {
+ state_->config = *cfg;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_enc_init(vpx_codec_ctx_t* ctx,
+ vpx_codec_iface_t* iface,
+ const vpx_codec_enc_cfg_t* cfg,
+ vpx_codec_flags_t flags) const override {
+ RTC_CHECK(ctx);
+ ctx->err = VPX_CODEC_OK;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_image_t* img_wrap(vpx_image_t* img,
+ vpx_img_fmt_t fmt,
+ unsigned int d_w,
+ unsigned int d_h,
+ unsigned int stride_align,
+ unsigned char* img_data) const override {
+ state_->img.fmt = fmt;
+ state_->img.d_w = d_w;
+ state_->img.d_h = d_h;
+ return &state_->img;
+ }
+
+ vpx_codec_err_t codec_encode(vpx_codec_ctx_t* ctx,
+ const vpx_image_t* img,
+ vpx_codec_pts_t pts,
+ uint64_t duration,
+ vpx_enc_frame_flags_t flags,
+ uint64_t deadline) const override {
+ if (flags & VPX_EFLAG_FORCE_KF) {
+ state_->pkt.data.frame.flags = VPX_FRAME_IS_KEY;
+ } else {
+ state_->pkt.data.frame.flags = 0;
+ }
+ state_->pkt.data.frame.duration = duration;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ void* param) const override {
+ if (ctrl_id == VP9E_REGISTER_CX_CALLBACK) {
+ state_->callback =
+ *reinterpret_cast<vpx_codec_priv_output_cx_pkt_cb_pair_t*>(param);
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(
+ vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_ref_frame_config_t* param) const override {
+ switch (ctrl_id) {
+ case VP9E_SET_SVC_REF_FRAME_CONFIG:
+ state_->ref_config = *param;
+ break;
+ case VP9E_GET_SVC_REF_FRAME_CONFIG:
+ *param = state_->ref_config;
+ break;
+ default:
+ break;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_layer_id_t* param) const override {
+ switch (ctrl_id) {
+ case VP9E_SET_SVC_LAYER_ID:
+ state_->layer_id = *param;
+ break;
+ case VP9E_GET_SVC_LAYER_ID:
+ *param = state_->layer_id;
+ break;
+ default:
+ break;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_frame_drop_t* param) const override {
+ if (ctrl_id == VP9E_SET_SVC_FRAME_DROP_LAYER) {
+ state_->frame_drop = *param;
+ }
+ return VPX_CODEC_OK;
+ }
+
+ vpx_codec_err_t codec_enc_config_set(
+ vpx_codec_ctx_t* ctx,
+ const vpx_codec_enc_cfg_t* cfg) const override {
+ state_->config = *cfg;
+ return VPX_CODEC_OK;
+ }
+
+ vpx_image_t* img_alloc(vpx_image_t* img,
+ vpx_img_fmt_t fmt,
+ unsigned int d_w,
+ unsigned int d_h,
+ unsigned int align) const override {
+ return nullptr;
+ }
+ void img_free(vpx_image_t* img) const override {}
+ vpx_codec_err_t codec_enc_init_multi(vpx_codec_ctx_t* ctx,
+ vpx_codec_iface_t* iface,
+ vpx_codec_enc_cfg_t* cfg,
+ int num_enc,
+ vpx_codec_flags_t flags,
+ vpx_rational_t* dsf) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_destroy(vpx_codec_ctx_t* ctx) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ uint32_t param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ int param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ int* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_roi_map* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_active_map* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_scaling_mode* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_extra_cfg_t* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(
+ vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_svc_spatial_layer_sync_t* param) const override {
+ return VPX_CODEC_OK;
+ }
+ vpx_codec_err_t codec_control(vpx_codec_ctx_t* ctx,
+ vp8e_enc_control_id ctrl_id,
+ vpx_rc_funcs_t* param) const override {
+ return VPX_CODEC_OK;
+ }
+ const vpx_codec_cx_pkt_t* codec_get_cx_data(
+ vpx_codec_ctx_t* ctx,
+ vpx_codec_iter_t* iter) const override {
+ return nullptr;
+ }
+ const char* codec_error_detail(vpx_codec_ctx_t* ctx) const override {
+ return nullptr;
+ }
+ const char* codec_error(vpx_codec_ctx_t* ctx) const override {
+ return nullptr;
+ }
+ const char* codec_err_to_string(vpx_codec_err_t err) const override {
+ return nullptr;
+ }
+
+ private:
+ LibvpxState* const state_;
+};
+
+enum Actions {
+ kEncode,
+ kSetRates,
+};
+
+// When a layer frame is marked for drop, drops all layer frames from that
+// picture with larger spatial ids.
+constexpr bool DropAbove(uint8_t layers_mask, int sid) {
+ uint8_t full_mask = (uint8_t{1} << (sid + 1)) - 1;
+ return (layers_mask & full_mask) != full_mask;
+}
+// inline unittests
+static_assert(DropAbove(0b1011, /*sid=*/0) == false, "");
+static_assert(DropAbove(0b1011, /*sid=*/1) == false, "");
+static_assert(DropAbove(0b1011, /*sid=*/2) == true, "");
+static_assert(DropAbove(0b1011, /*sid=*/3) == true, "");
+
+// When a layer frame is marked for drop, drops all layer frames from that
+// picture with smaller spatial ids.
+constexpr bool DropBelow(uint8_t layers_mask, int sid, int num_layers) {
+ return (layers_mask >> sid) != (1 << (num_layers - sid)) - 1;
+}
+// inline unittests
+static_assert(DropBelow(0b1101, /*sid=*/0, 4) == true, "");
+static_assert(DropBelow(0b1101, /*sid=*/1, 4) == true, "");
+static_assert(DropBelow(0b1101, /*sid=*/2, 4) == false, "");
+static_assert(DropBelow(0b1101, /*sid=*/3, 4) == false, "");
+
+} // namespace
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ FuzzDataHelper helper(rtc::MakeArrayView(data, size));
+
+ FrameValidator validator;
+ FieldTrials field_trials(helper);
+  // Set up the callbacks for the fake libvpx implementation.
+ LibvpxState state;
+
+ // Initialize encoder
+ LibvpxVp9Encoder encoder(cricket::VideoCodec(),
+ std::make_unique<StubLibvpx>(&state), field_trials);
+ VideoCodec codec = CodecSettings(helper);
+ if (encoder.InitEncode(&codec, EncoderSettings()) != WEBRTC_VIDEO_CODEC_OK) {
+ return;
+ }
+ RTC_CHECK_EQ(encoder.RegisterEncodeCompleteCallback(&validator),
+ WEBRTC_VIDEO_CODEC_OK);
+ {
+ // Enable all the layers initially. Encoder doesn't support producing
+ // frames when no layers are enabled.
+ LibvpxVp9Encoder::RateControlParameters parameters;
+ parameters.framerate_fps = 30.0;
+ for (int sid = 0; sid < codec.VP9()->numberOfSpatialLayers; ++sid) {
+ for (int tid = 0; tid < codec.VP9()->numberOfTemporalLayers; ++tid) {
+ parameters.bitrate.SetBitrate(sid, tid, kBitrateEnabledBps);
+ }
+ }
+ encoder.SetRates(parameters);
+ }
+
+ std::vector<VideoFrameType> frame_types(1);
+ VideoFrame fake_image = VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(
+ int{codec.width}, int{codec.height}))
+ .build();
+
+ // Start producing frames at random.
+ while (helper.CanReadBytes(1)) {
+ uint8_t action = helper.Read<uint8_t>();
+ switch (action & 0b11) {
+ case kEncode: {
+ // bitmask of the action: SSSS-K00, where
+        // four S bits indicate which spatial layers should be produced,
+ // K bit indicates if frame should be a key frame.
+ frame_types[0] = (action & 0b100) ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+ encoder.Encode(fake_image, &frame_types);
+ uint8_t encode_spatial_layers = (action >> 4);
+ for (size_t sid = 0; sid < state.config.ss_number_layers; ++sid) {
+ if (state.config.ss_target_bitrate[sid] == 0) {
+ // Don't encode disabled spatial layers.
+ continue;
+ }
+ bool drop = true;
+ switch (state.frame_drop.framedrop_mode) {
+ case FULL_SUPERFRAME_DROP:
+ drop = encode_spatial_layers == 0;
+ break;
+ case LAYER_DROP:
+ drop = (encode_spatial_layers & (1 << sid)) == 0;
+ break;
+ case CONSTRAINED_LAYER_DROP:
+ drop = DropBelow(encode_spatial_layers, sid,
+ state.config.ss_number_layers);
+ break;
+ case CONSTRAINED_FROM_ABOVE_DROP:
+ drop = DropAbove(encode_spatial_layers, sid);
+ break;
+ }
+ if (!drop) {
+ state.layer_id.spatial_layer_id = sid;
+ state.callback.output_cx_pkt(&state.pkt, state.callback.user_priv);
+ }
+ }
+ } break;
+ case kSetRates: {
+ // bitmask of the action: (S2)(S1)(S0)01,
+ // where Sx is number of temporal layers to enable for spatial layer x
+        // In particular, Sx = 0 indicates spatial layer x should be disabled.
+ LibvpxVp9Encoder::RateControlParameters parameters;
+ parameters.framerate_fps = 30.0;
+ for (int sid = 0; sid < codec.VP9()->numberOfSpatialLayers; ++sid) {
+ int temporal_layers = (action >> ((1 + sid) * 2)) & 0b11;
+ for (int tid = 0; tid < temporal_layers; ++tid) {
+ parameters.bitrate.SetBitrate(sid, tid, kBitrateEnabledBps);
+ }
+ }
+ if (IsSupported(codec.VP9()->numberOfSpatialLayers,
+ codec.VP9()->numberOfTemporalLayers,
+ parameters.bitrate)) {
+ encoder.SetRates(parameters);
+ }
+ } break;
+ default:
+        // Unspecified values are no-ops.
+ break;
+ }
+ }
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp9_qp_parser_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp9_qp_parser_fuzzer.cc
new file mode 100644
index 0000000000..80dfe15b16
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp9_qp_parser_fuzzer.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+
+namespace webrtc {
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ ParseUncompressedVp9Header(rtc::MakeArrayView(data, size));
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/vp9_replay_fuzzer.cc b/third_party/libwebrtc/test/fuzzers/vp9_replay_fuzzer.cc
new file mode 100644
index 0000000000..fc10d9ffc7
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/vp9_replay_fuzzer.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+
+#include "test/fuzzers/utils/rtp_replayer.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ auto stream_state = std::make_unique<test::RtpReplayer::StreamState>();
+ VideoReceiveStreamInterface::Config vp9_config(&(stream_state->transport));
+
+ VideoReceiveStreamInterface::Decoder vp9_decoder;
+ vp9_decoder.video_format = SdpVideoFormat("VP9");
+ vp9_decoder.payload_type = 124;
+ vp9_config.decoders.push_back(std::move(vp9_decoder));
+
+ vp9_config.rtp.local_ssrc = 7731;
+ vp9_config.rtp.remote_ssrc = 1337;
+ vp9_config.rtp.rtx_ssrc = 100;
+ vp9_config.rtp.nack.rtp_history_ms = 1000;
+
+ std::vector<VideoReceiveStreamInterface::Config> replay_configs;
+ replay_configs.push_back(std::move(vp9_config));
+
+ test::RtpReplayer::Replay(std::move(stream_state), std::move(replay_configs),
+ data, size);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/fuzzers/webrtc_fuzzer_main.cc b/third_party/libwebrtc/test/fuzzers/webrtc_fuzzer_main.cc
new file mode 100644
index 0000000000..a52dd231be
--- /dev/null
+++ b/third_party/libwebrtc/test/fuzzers/webrtc_fuzzer_main.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file is intended to provide a common interface for fuzzing functions.
+// It's intended to set sane defaults, such as removing logging for further
+// fuzzing efficiency.
+
+#include "rtc_base/logging.h"
+
+namespace {
+bool g_initialized = false;
+void InitializeWebRtcFuzzDefaults() {
+ if (g_initialized)
+ return;
+
+// Remove default logging to prevent huge slowdowns.
+// TODO(pbos): Disable in Chromium: http://crbug.com/561667
+#if !defined(WEBRTC_CHROMIUM_BUILD)
+ rtc::LogMessage::LogToDebug(rtc::LS_NONE);
+#endif // !defined(WEBRTC_CHROMIUM_BUILD)
+
+ g_initialized = true;
+}
+} // namespace
+
+namespace webrtc {
+extern void FuzzOneInput(const uint8_t* data, size_t size);
+} // namespace webrtc
+
+extern "C" int LLVMFuzzerTestOneInput(const unsigned char* data, size_t size) {
+ InitializeWebRtcFuzzDefaults();
+ webrtc::FuzzOneInput(data, size);
+ return 0;
+}
diff --git a/third_party/libwebrtc/test/gl/gl_renderer.cc b/third_party/libwebrtc/test/gl/gl_renderer.cc
new file mode 100644
index 0000000000..10162ee056
--- /dev/null
+++ b/third_party/libwebrtc/test/gl/gl_renderer.cc
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/gl/gl_renderer.h"
+
+#include <string.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+GlRenderer::GlRenderer()
+ : is_init_(false), buffer_(NULL), width_(0), height_(0) {}
+
+void GlRenderer::Init() {
+ RTC_DCHECK(!is_init_);
+ is_init_ = true;
+
+ glGenTextures(1, &texture_);
+}
+
+void GlRenderer::Destroy() {
+ if (!is_init_) {
+ return;
+ }
+
+ is_init_ = false;
+
+ delete[] buffer_;
+ buffer_ = NULL;
+
+ glDeleteTextures(1, &texture_);
+}
+
+void GlRenderer::ResizeViewport(size_t width, size_t height) {
+ // TODO(pbos): Aspect ratio, letterbox the video.
+ glViewport(0, 0, width, height);
+
+ glMatrixMode(GL_PROJECTION);
+ glLoadIdentity();
+ glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ glOrtho(0.0f, 1.0f, 1.0f, 0.0f, -1.0f, 1.0f);
+ glMatrixMode(GL_MODELVIEW);
+}
+
+void GlRenderer::ResizeVideo(size_t width, size_t height) {
+ RTC_DCHECK(is_init_);
+ width_ = width;
+ height_ = height;
+
+ buffer_size_ = width * height * 4; // BGRA
+
+ delete[] buffer_;
+ buffer_ = new uint8_t[buffer_size_];
+ RTC_DCHECK(buffer_);
+ memset(buffer_, 0, buffer_size_);
+ glBindTexture(GL_TEXTURE_2D, texture_);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, 0);
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_BGRA,
+ GL_UNSIGNED_INT_8_8_8_8, static_cast<GLvoid*>(buffer_));
+}
+
+void GlRenderer::OnFrame(const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(is_init_);
+
+ if (static_cast<size_t>(frame.width()) != width_ ||
+ static_cast<size_t>(frame.height()) != height_) {
+ ResizeVideo(frame.width(), frame.height());
+ }
+
+ webrtc::ConvertFromI420(frame, VideoType::kBGRA, 0, buffer_);
+
+ glEnable(GL_TEXTURE_2D);
+ glBindTexture(GL_TEXTURE_2D, texture_);
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width_, height_, GL_BGRA,
+ GL_UNSIGNED_INT_8_8_8_8, buffer_);
+
+ glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+
+ glLoadIdentity();
+
+ glBegin(GL_QUADS);
+ {
+ glTexCoord2f(0.0f, 0.0f);
+ glVertex3f(0.0f, 0.0f, 0.0f);
+
+ glTexCoord2f(0.0f, 1.0f);
+ glVertex3f(0.0f, 1.0f, 0.0f);
+
+ glTexCoord2f(1.0f, 1.0f);
+ glVertex3f(1.0f, 1.0f, 0.0f);
+
+ glTexCoord2f(1.0f, 0.0f);
+ glVertex3f(1.0f, 0.0f, 0.0f);
+ }
+ glEnd();
+
+ glBindTexture(GL_TEXTURE_2D, 0);
+ glFlush();
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/gl/gl_renderer.h b/third_party/libwebrtc/test/gl/gl_renderer.h
new file mode 100644
index 0000000000..8338591244
--- /dev/null
+++ b/third_party/libwebrtc/test/gl/gl_renderer.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_GL_GL_RENDERER_H_
+#define TEST_GL_GL_RENDERER_H_
+
+#ifdef WEBRTC_MAC
+#include <OpenGL/gl.h>
+#else
+#include <GL/gl.h>
+#endif
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/video/video_frame.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+class GlRenderer : public VideoRenderer {
+ public:
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ protected:
+ GlRenderer();
+
+ void Init();
+ void Destroy();
+
+ void ResizeViewport(size_t width, size_t height);
+
+ private:
+ bool is_init_;
+ uint8_t* buffer_;
+ GLuint texture_;
+ size_t width_, height_, buffer_size_;
+
+ void ResizeVideo(size_t width, size_t height);
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_GL_GL_RENDERER_H_
diff --git a/third_party/libwebrtc/test/gmock.h b/third_party/libwebrtc/test/gmock.h
new file mode 100644
index 0000000000..f137d080a4
--- /dev/null
+++ b/third_party/libwebrtc/test/gmock.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_GMOCK_H_
+#define TEST_GMOCK_H_
+
+#include "rtc_base/ignore_wundef.h"
+
+RTC_PUSH_IGNORING_WUNDEF()
+#include "testing/gmock/include/gmock/gmock.h"
+RTC_POP_IGNORING_WUNDEF()
+
+#endif // TEST_GMOCK_H_
diff --git a/third_party/libwebrtc/test/gtest.h b/third_party/libwebrtc/test/gtest.h
new file mode 100644
index 0000000000..fa4396420e
--- /dev/null
+++ b/third_party/libwebrtc/test/gtest.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_GTEST_H_
+#define TEST_GTEST_H_
+
+#include "rtc_base/ignore_wundef.h"
+
+RTC_PUSH_IGNORING_WUNDEF()
+#include "testing/gtest/include/gtest/gtest-spi.h"
+#include "testing/gtest/include/gtest/gtest.h"
+RTC_POP_IGNORING_WUNDEF()
+
+// GTEST_HAS_DEATH_TEST is set to 1 when death tests are supported, but appears
+// to be left unset if they're not supported. Rather than depend on this, we
+// set it to 0 ourselves here.
+#ifndef GTEST_HAS_DEATH_TEST
+#define GTEST_HAS_DEATH_TEST 0
+#endif
+
+#endif // TEST_GTEST_H_
diff --git a/third_party/libwebrtc/test/ios/Info.plist b/third_party/libwebrtc/test/ios/Info.plist
new file mode 100644
index 0000000000..fe06a5e005
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/Info.plist
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>English</string>
+ <key>CFBundleDisplayName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundleExecutable</key>
+ <string>${EXECUTABLE_NAME}</string>
+ <key>CFBundleIdentifier</key>
+ <string>${BUNDLE_IDENTIFIER}</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>${PRODUCT_NAME}</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>LSRequiresIPhoneOS</key>
+ <true/>
+ <key>NSAppTransportSecurity</key>
+ <dict>
+ <key>NSAllowsArbitraryLoads</key>
+ <true/>
+ </dict>
+ <key>UIFileSharingEnabled</key>
+ <true/>
+ <key>UISupportedInterfaceOrientation</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ <string>UIInterfaceOrientationLandscapeLeft</string>
+ <string>UIInterfaceOrientationLandscapeRight</string>
+ </array>
+ <key>UIBackgroundModes</key>
+ <array>
+ <string>fetch</string>
+ </array>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>For testing purposes</string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/test/ios/coverage_util_ios.h b/third_party/libwebrtc/test/ios/coverage_util_ios.h
new file mode 100644
index 0000000000..a17b69dca8
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/coverage_util_ios.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_IOS_COVERAGE_UTIL_IOS_H_
+#define TEST_IOS_COVERAGE_UTIL_IOS_H_
+
+namespace rtc {
+namespace test {
+
+// In debug builds, if IOS_ENABLE_COVERAGE is defined, sets the filename of the
+// coverage file. Otherwise, it does nothing.
+void ConfigureCoverageReportPath();
+
+} // namespace test
+} // namespace rtc
+
+#endif // TEST_IOS_COVERAGE_UTIL_IOS_H_
diff --git a/third_party/libwebrtc/test/ios/coverage_util_ios.mm b/third_party/libwebrtc/test/ios/coverage_util_ios.mm
new file mode 100644
index 0000000000..c21a16def2
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/coverage_util_ios.mm
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#ifdef WEBRTC_IOS_ENABLE_COVERAGE
+extern "C" void __llvm_profile_set_filename(const char* name);
+#endif
+
+namespace rtc {
+namespace test {
+
+void ConfigureCoverageReportPath() {
+#ifdef WEBRTC_IOS_ENABLE_COVERAGE
+ static dispatch_once_t once_token;
+ dispatch_once(&once_token, ^{
+ // Writes the profraw file to the Documents directory, where the app has
+ // write rights.
+ NSArray* paths =
+ NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+ NSString* documents_directory = [paths firstObject];
+ NSString* file_name = [documents_directory stringByAppendingPathComponent:@"coverage.profraw"];
+
+ // For documentation, see:
+ // http://clang.llvm.org/docs/SourceBasedCodeCoverage.html
+ __llvm_profile_set_filename([file_name cStringUsingEncoding:NSUTF8StringEncoding]);
+
+ // Print the path for easier retrieval.
+ NSLog(@"Coverage data at %@.", file_name);
+ });
+#endif // ifdef WEBRTC_IOS_ENABLE_COVERAGE
+}
+
+} // namespace test
+} // namespace rtc
diff --git a/third_party/libwebrtc/test/ios/google_test_runner.mm b/third_party/libwebrtc/test/ios/google_test_runner.mm
new file mode 100644
index 0000000000..87b7f7dfd7
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/google_test_runner.mm
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Copied from Chromium base/test/ios/google_test_runner.mm to avoid
+// the //base dependency. The protocol is required to run iOS Unittest.
+
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+#import "test/ios/google_test_runner_delegate.h"
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+@interface GoogleTestRunner : XCTestCase
+@end
+
+@implementation GoogleTestRunner
+
+- (void)testRunGoogleTests {
+ self.continueAfterFailure = false;
+
+ id appDelegate = UIApplication.sharedApplication.delegate;
+ XCTAssertTrue([appDelegate conformsToProtocol:@protocol(GoogleTestRunnerDelegate)]);
+
+ id<GoogleTestRunnerDelegate> runnerDelegate =
+ static_cast<id<GoogleTestRunnerDelegate>>(appDelegate);
+ XCTAssertTrue(runnerDelegate.supportsRunningGoogleTests);
+ XCTAssertTrue([runnerDelegate runGoogleTests] == 0);
+}
+
+@end
diff --git a/third_party/libwebrtc/test/ios/google_test_runner_delegate.h b/third_party/libwebrtc/test/ios/google_test_runner_delegate.h
new file mode 100644
index 0000000000..78ee59e028
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/google_test_runner_delegate.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_IOS_GOOGLE_TEST_RUNNER_DELEGATE_H_
+#define TEST_IOS_GOOGLE_TEST_RUNNER_DELEGATE_H_
+
+// Copied from Chromium base/test/ios/google_test_runner_delegate.h
+// to avoid the //base dependency. This protocol is required
+// to run iOS Unittest.
+@protocol GoogleTestRunnerDelegate
+
+// Returns YES if this delegate supports running GoogleTests via a call to
+// `runGoogleTests`.
+@property(nonatomic, readonly, assign) BOOL supportsRunningGoogleTests;
+
+// Runs GoogleTests and returns the final exit code.
+- (int)runGoogleTests;
+
+@end
+
+#endif // TEST_IOS_GOOGLE_TEST_RUNNER_DELEGATE_H_
diff --git a/third_party/libwebrtc/test/ios/test_support.h b/third_party/libwebrtc/test/ios/test_support.h
new file mode 100644
index 0000000000..5ac731393f
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/test_support.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_IOS_TEST_SUPPORT_H_
+#define TEST_IOS_TEST_SUPPORT_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+
+namespace rtc {
+namespace test {
+// Launches an iOS app that serves as a host for a test suite.
+// This is necessary as iOS doesn't like processes without a gui
+// running for longer than a few seconds.
+void RunTestsFromIOSApp();
+void InitTestSuite(int (*test_suite)(void),
+ int argc,
+ char* argv[],
+ bool save_chartjson_result,
+ bool export_perf_results_new_api,
+ std::string webrtc_test_metrics_output_path,
+ absl::optional<std::vector<std::string>> metrics_to_plot);
+
+// Returns true if unittests should be run by the XCTest runner.
+bool ShouldRunIOSUnittestsWithXCTest();
+
+} // namespace test
+} // namespace rtc
+
+#endif // TEST_IOS_TEST_SUPPORT_H_
diff --git a/third_party/libwebrtc/test/ios/test_support.mm b/third_party/libwebrtc/test/ios/test_support.mm
new file mode 100644
index 0000000000..d3c9ee0c74
--- /dev/null
+++ b/third_party/libwebrtc/test/ios/test_support.mm
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <UIKit/UIKit.h>
+
+#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/metrics/metrics_exporter.h"
+#include "api/test/metrics/metrics_set_proto_file_exporter.h"
+#include "api/test/metrics/print_result_proxy_metrics_exporter.h"
+#include "api/test/metrics/stdout_metrics_exporter.h"
+#include "test/ios/coverage_util_ios.h"
+#include "test/ios/google_test_runner_delegate.h"
+#include "test/ios/test_support.h"
+#include "test/testsupport/perf_test.h"
+
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+// Springboard will kill any iOS app that fails to check in after launch within
+// a given time. Starting a UIApplication before invoking TestSuite::Run
+// prevents this from happening.
+
+// InitTestSuite saves the TestSuite and argc/argv, then invoking
+// RunTestsFromIOSApp calls UIApplicationMain(), providing an application
+// delegate class: WebRtcUnitTestDelegate. The delegate implements
+// application:didFinishLaunchingWithOptions: to invoke the TestSuite's Run
+// method.
+
+// Since the executable isn't likely to be a real iOS UI, the delegate puts up a
+// window displaying the app name. If a bunch of apps using MainHook are being
+// run in a row, this provides an indication of which one is currently running.
+
+// If enabled, runs unittests using the XCTest test runner.
+const char kEnableRunIOSUnittestsWithXCTest[] = "enable-run-ios-unittests-with-xctest";
+
+static int (*g_test_suite)(void) = NULL;
+static int g_argc;
+static char **g_argv;
+static bool g_write_perf_output;
+static bool g_export_perf_results_new_api;
+static std::string g_webrtc_test_metrics_output_path;
+static absl::optional<bool> g_is_xctest;
+static absl::optional<std::vector<std::string>> g_metrics_to_plot;
+
+@interface UIApplication (Testing)
+- (void)_terminateWithStatus:(int)status;
+@end
+
+@interface WebRtcUnitTestDelegate : NSObject <GoogleTestRunnerDelegate> {
+ UIWindow *_window;
+}
+- (void)runTests;
+@end
+
+@implementation WebRtcUnitTestDelegate
+
+- (BOOL)application:(UIApplication *)application
+ didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+ CGRect bounds = [[UIScreen mainScreen] bounds];
+
+ _window = [[UIWindow alloc] initWithFrame:bounds];
+ [_window setBackgroundColor:[UIColor whiteColor]];
+ [_window makeKeyAndVisible];
+
+ // Add a label with the app name.
+ UILabel *label = [[UILabel alloc] initWithFrame:bounds];
+ label.text = [[NSProcessInfo processInfo] processName];
+ label.textAlignment = NSTextAlignmentCenter;
+ [_window addSubview:label];
+
+ // An NSInternalInconsistencyException is thrown if the app doesn't have a
+ // root view controller. Set an empty one here.
+ [_window setRootViewController:[[UIViewController alloc] init]];
+
+ if (!rtc::test::ShouldRunIOSUnittestsWithXCTest()) {
+    // When running in XCTest mode, XCTest will invoke `runGoogleTests` directly.
+ // Otherwise, schedule a call to `runTests`.
+ [self performSelector:@selector(runTests) withObject:nil afterDelay:0.1];
+ }
+
+ return YES;
+}
+
+- (BOOL)supportsRunningGoogleTests {
+ return rtc::test::ShouldRunIOSUnittestsWithXCTest();
+}
+
+- (int)runGoogleTests {
+ rtc::test::ConfigureCoverageReportPath();
+
+ int exitStatus = g_test_suite();
+
+ NSArray<NSString *> *outputDirectories =
+ NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+ std::vector<std::unique_ptr<webrtc::test::MetricsExporter>> exporters;
+ if (g_export_perf_results_new_api) {
+ exporters.push_back(std::make_unique<webrtc::test::StdoutMetricsExporter>());
+ if (g_write_perf_output) {
+ // Stores data into a proto file under the app's document directory.
+ NSString *fileName = @"perftest-output.pb";
+ if ([outputDirectories count] != 0) {
+ NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName];
+
+ exporters.push_back(std::make_unique<webrtc::test::ChromePerfDashboardMetricsExporter>(
+ [NSString stdStringForString:outputPath]));
+ }
+ }
+ if (!g_webrtc_test_metrics_output_path.empty()) {
+ RTC_CHECK_EQ(g_webrtc_test_metrics_output_path.find('/'), std::string::npos)
+ << "On iOS, --webrtc_test_metrics_output_path must only be a file name.";
+ if ([outputDirectories count] != 0) {
+ NSString *fileName = [NSString stringWithCString:g_webrtc_test_metrics_output_path.c_str()
+ encoding:[NSString defaultCStringEncoding]];
+ NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName];
+ exporters.push_back(std::make_unique<webrtc::test::MetricsSetProtoFileExporter>(
+ webrtc::test::MetricsSetProtoFileExporter::Options(
+ [NSString stdStringForString:outputPath])));
+ }
+ }
+ } else {
+ exporters.push_back(std::make_unique<webrtc::test::PrintResultProxyMetricsExporter>());
+ }
+ webrtc::test::ExportPerfMetric(*webrtc::test::GetGlobalMetricsLogger(), std::move(exporters));
+ if (!g_export_perf_results_new_api) {
+ if (g_write_perf_output) {
+ // Stores data into a proto file under the app's document directory.
+ NSString *fileName = @"perftest-output.pb";
+ if ([outputDirectories count] != 0) {
+ NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName];
+
+ if (!webrtc::test::WritePerfResults([NSString stdStringForString:outputPath])) {
+ return 1;
+ }
+ }
+ }
+ if (g_metrics_to_plot) {
+ webrtc::test::PrintPlottableResults(*g_metrics_to_plot);
+ }
+ }
+
+ return exitStatus;
+}
+
+- (void)runTests {
+ RTC_DCHECK(!rtc::test::ShouldRunIOSUnittestsWithXCTest());
+ rtc::test::ConfigureCoverageReportPath();
+
+ int exitStatus = [self runGoogleTests];
+
+  // If a test app is too fast, it will exit before Instruments has a
+  // chance to initialize and no test results will be seen.
+ // TODO(crbug.com/137010): Figure out how much time is actually needed, and
+ // sleep only to make sure that much time has elapsed since launch.
+ [NSThread sleepUntilDate:[NSDate dateWithTimeIntervalSinceNow:2.0]];
+
+ // Use the hidden selector to try and cleanly take down the app (otherwise
+ // things can think the app crashed even on a zero exit status).
+ UIApplication *application = [UIApplication sharedApplication];
+ [application _terminateWithStatus:exitStatus];
+
+ exit(exitStatus);
+}
+
+@end
+namespace rtc {
+namespace test {
+
+// Note: This is not thread safe, and must be called from the same thread as
+// runTests above.
+void InitTestSuite(int (*test_suite)(void),
+ int argc,
+ char *argv[],
+ bool write_perf_output,
+ bool export_perf_results_new_api,
+ std::string webrtc_test_metrics_output_path,
+ absl::optional<std::vector<std::string>> metrics_to_plot) {
+ g_test_suite = test_suite;
+ g_argc = argc;
+ g_argv = argv;
+ g_write_perf_output = write_perf_output;
+ g_export_perf_results_new_api = export_perf_results_new_api;
+ g_webrtc_test_metrics_output_path = webrtc_test_metrics_output_path;
+ g_metrics_to_plot = std::move(metrics_to_plot);
+}
+
+void RunTestsFromIOSApp() {
+ @autoreleasepool {
+ exit(UIApplicationMain(g_argc, g_argv, nil, @"WebRtcUnitTestDelegate"));
+ }
+}
+
+bool ShouldRunIOSUnittestsWithXCTest() {
+ if (g_is_xctest.has_value()) {
+ return g_is_xctest.value();
+ }
+
+ char **argv = g_argv;
+ while (*argv != nullptr) {
+ if (strstr(*argv, kEnableRunIOSUnittestsWithXCTest) != nullptr) {
+ g_is_xctest = absl::optional<bool>(true);
+ return true;
+ }
+ argv++;
+ }
+ g_is_xctest = absl::optional<bool>(false);
+ return false;
+}
+
+} // namespace test
+} // namespace rtc
diff --git a/third_party/libwebrtc/test/layer_filtering_transport.cc b/third_party/libwebrtc/test/layer_filtering_transport.cc
new file mode 100644
index 0000000000..f5e5ee3002
--- /dev/null
+++ b/third_party/libwebrtc/test/layer_filtering_transport.cc
@@ -0,0 +1,186 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/layer_filtering_transport.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include "api/rtp_headers.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "modules/video_coding/codecs/interface/common_constants.h"
+#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
+#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+LayerFilteringTransport::LayerFilteringTransport(
+ TaskQueueBase* task_queue,
+ std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
+ Call* send_call,
+ uint8_t vp8_video_payload_type,
+ uint8_t vp9_video_payload_type,
+ int selected_tl,
+ int selected_sl,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ uint32_t ssrc_to_filter_min,
+ uint32_t ssrc_to_filter_max,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions)
+ : DirectTransport(task_queue,
+ std::move(pipe),
+ send_call,
+ payload_type_map,
+ audio_extensions,
+ video_extensions),
+ vp8_video_payload_type_(vp8_video_payload_type),
+ vp9_video_payload_type_(vp9_video_payload_type),
+ vp8_depacketizer_(CreateVideoRtpDepacketizer(kVideoCodecVP8)),
+ vp9_depacketizer_(CreateVideoRtpDepacketizer(kVideoCodecVP9)),
+ selected_tl_(selected_tl),
+ selected_sl_(selected_sl),
+ discarded_last_packet_(false),
+ ssrc_to_filter_min_(ssrc_to_filter_min),
+ ssrc_to_filter_max_(ssrc_to_filter_max) {}
+
+LayerFilteringTransport::LayerFilteringTransport(
+ TaskQueueBase* task_queue,
+ std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
+ Call* send_call,
+ uint8_t vp8_video_payload_type,
+ uint8_t vp9_video_payload_type,
+ int selected_tl,
+ int selected_sl,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions)
+ : LayerFilteringTransport(task_queue,
+ std::move(pipe),
+ send_call,
+ vp8_video_payload_type,
+ vp9_video_payload_type,
+ selected_tl,
+ selected_sl,
+ payload_type_map,
+ /*ssrc_to_filter_min=*/0,
+ /*ssrc_to_filter_max=*/0xFFFFFFFF,
+ audio_extensions,
+ video_extensions) {}
+
+bool LayerFilteringTransport::DiscardedLastPacket() const {
+ return discarded_last_packet_;
+}
+
+bool LayerFilteringTransport::SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) {
+ if (selected_tl_ == -1 && selected_sl_ == -1) {
+ // Nothing to change, forward the packet immediately.
+ return test::DirectTransport::SendRtp(packet, length, options);
+ }
+
+ RtpPacket rtp_packet;
+ rtp_packet.Parse(packet, length);
+
+ if (rtp_packet.Ssrc() < ssrc_to_filter_min_ ||
+ rtp_packet.Ssrc() > ssrc_to_filter_max_) {
+ // Nothing to change, forward the packet immediately.
+ return test::DirectTransport::SendRtp(packet, length, options);
+ }
+
+ if (rtp_packet.PayloadType() == vp8_video_payload_type_ ||
+ rtp_packet.PayloadType() == vp9_video_payload_type_) {
+ const bool is_vp8 = rtp_packet.PayloadType() == vp8_video_payload_type_;
+ VideoRtpDepacketizer& depacketizer =
+ is_vp8 ? *vp8_depacketizer_ : *vp9_depacketizer_;
+ if (auto parsed_payload = depacketizer.Parse(rtp_packet.PayloadBuffer())) {
+ int temporal_idx;
+ int spatial_idx;
+ bool non_ref_for_inter_layer_pred;
+ bool end_of_frame;
+
+ if (is_vp8) {
+ temporal_idx = absl::get<RTPVideoHeaderVP8>(
+ parsed_payload->video_header.video_type_header)
+ .temporalIdx;
+ spatial_idx = kNoSpatialIdx;
+ num_active_spatial_layers_ = 1;
+ non_ref_for_inter_layer_pred = false;
+ end_of_frame = true;
+ } else {
+ const auto& vp9_header = absl::get<RTPVideoHeaderVP9>(
+ parsed_payload->video_header.video_type_header);
+ temporal_idx = vp9_header.temporal_idx;
+ spatial_idx = vp9_header.spatial_idx;
+ non_ref_for_inter_layer_pred = vp9_header.non_ref_for_inter_layer_pred;
+ end_of_frame = vp9_header.end_of_frame;
+ if (vp9_header.ss_data_available) {
+ RTC_DCHECK(vp9_header.temporal_idx == kNoTemporalIdx ||
+ vp9_header.temporal_idx == 0);
+ num_active_spatial_layers_ = vp9_header.num_spatial_layers;
+ }
+ }
+
+ if (spatial_idx == kNoSpatialIdx)
+ num_active_spatial_layers_ = 1;
+
+ RTC_CHECK_GT(num_active_spatial_layers_, 0);
+
+ if (selected_sl_ >= 0 &&
+ spatial_idx ==
+ std::min(num_active_spatial_layers_ - 1, selected_sl_) &&
+ end_of_frame) {
+ // This layer is now the last in the superframe.
+ rtp_packet.SetMarker(true);
+ } else {
+ const bool higher_temporal_layer =
+ (selected_tl_ >= 0 && temporal_idx != kNoTemporalIdx &&
+ temporal_idx > selected_tl_);
+
+ const bool higher_spatial_layer =
+ (selected_sl_ >= 0 && spatial_idx != kNoSpatialIdx &&
+ spatial_idx > selected_sl_);
+
+ // Filter out non-reference lower spatial layers since they are not
+ // needed for decoding of target spatial layer.
+ const bool lower_non_ref_spatial_layer =
+ (selected_sl_ >= 0 && spatial_idx != kNoSpatialIdx &&
+ spatial_idx <
+ std::min(num_active_spatial_layers_ - 1, selected_sl_) &&
+ non_ref_for_inter_layer_pred);
+
+ if (higher_temporal_layer || higher_spatial_layer ||
+ lower_non_ref_spatial_layer) {
+ // Truncate packet to a padding packet.
+ rtp_packet.SetPayloadSize(0);
+ rtp_packet.SetPadding(1);
+ rtp_packet.SetMarker(false);
+ discarded_last_packet_ = true;
+ }
+ }
+ } else {
+ RTC_DCHECK_NOTREACHED() << "Parse error";
+ }
+ }
+
+ return test::DirectTransport::SendRtp(rtp_packet.data(), rtp_packet.size(),
+ options);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/layer_filtering_transport.h b/third_party/libwebrtc/test/layer_filtering_transport.h
new file mode 100644
index 0000000000..3aefd7159b
--- /dev/null
+++ b/third_party/libwebrtc/test/layer_filtering_transport.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_LAYER_FILTERING_TRANSPORT_H_
+#define TEST_LAYER_FILTERING_TRANSPORT_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <map>
+#include <memory>
+
+#include "api/call/transport.h"
+#include "api/media_types.h"
+#include "call/call.h"
+#include "call/simulated_packet_receiver.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
+#include "test/direct_transport.h"
+
+namespace webrtc {
+
+namespace test {
+
+class LayerFilteringTransport : public test::DirectTransport {
+ public:
+ LayerFilteringTransport(
+ TaskQueueBase* task_queue,
+ std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
+ Call* send_call,
+ uint8_t vp8_video_payload_type,
+ uint8_t vp9_video_payload_type,
+ int selected_tl,
+ int selected_sl,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ uint32_t ssrc_to_filter_min,
+ uint32_t ssrc_to_filter_max,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions);
+ LayerFilteringTransport(
+ TaskQueueBase* task_queue,
+ std::unique_ptr<SimulatedPacketReceiverInterface> pipe,
+ Call* send_call,
+ uint8_t vp8_video_payload_type,
+ uint8_t vp9_video_payload_type,
+ int selected_tl,
+ int selected_sl,
+ const std::map<uint8_t, MediaType>& payload_type_map,
+ rtc::ArrayView<const RtpExtension> audio_extensions,
+ rtc::ArrayView<const RtpExtension> video_extensions);
+ bool DiscardedLastPacket() const;
+ bool SendRtp(const uint8_t* data,
+ size_t length,
+ const PacketOptions& options) override;
+
+ private:
+ // Used to distinguish between VP8 and VP9.
+ const uint8_t vp8_video_payload_type_;
+ const uint8_t vp9_video_payload_type_;
+ const std::unique_ptr<VideoRtpDepacketizer> vp8_depacketizer_;
+ const std::unique_ptr<VideoRtpDepacketizer> vp9_depacketizer_;
+ // Discard or invalidate all temporal/spatial layers with id greater than the
+ // selected one. -1 to disable filtering.
+ const int selected_tl_;
+ const int selected_sl_;
+ bool discarded_last_packet_;
+ int num_active_spatial_layers_;
+ const uint32_t ssrc_to_filter_min_;
+ const uint32_t ssrc_to_filter_max_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_LAYER_FILTERING_TRANSPORT_H_
diff --git a/third_party/libwebrtc/test/linux/glx_renderer.cc b/third_party/libwebrtc/test/linux/glx_renderer.cc
new file mode 100644
index 0000000000..509a6c286e
--- /dev/null
+++ b/third_party/libwebrtc/test/linux/glx_renderer.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/linux/glx_renderer.h"
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <stdlib.h>
+
+namespace webrtc {
+namespace test {
+
+GlxRenderer::GlxRenderer(size_t width, size_t height)
+ : width_(width), height_(height), display_(NULL), context_(NULL) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+}
+
+GlxRenderer::~GlxRenderer() {
+ Destroy();
+}
+
+bool GlxRenderer::Init(const char* window_title) {
+ if ((display_ = XOpenDisplay(NULL)) == NULL) {
+ Destroy();
+ return false;
+ }
+
+ int screen = DefaultScreen(display_);
+
+ XVisualInfo* vi;
+ int attr_list[] = {
+ GLX_DOUBLEBUFFER, GLX_RGBA, GLX_RED_SIZE, 4, GLX_GREEN_SIZE, 4,
+ GLX_BLUE_SIZE, 4, GLX_DEPTH_SIZE, 16, None,
+ };
+
+ if ((vi = glXChooseVisual(display_, screen, attr_list)) == NULL) {
+ Destroy();
+ return false;
+ }
+
+ context_ = glXCreateContext(display_, vi, 0, true);
+ if (context_ == NULL) {
+ Destroy();
+ return false;
+ }
+
+ XSetWindowAttributes window_attributes;
+ window_attributes.colormap = XCreateColormap(
+ display_, RootWindow(display_, vi->screen), vi->visual, AllocNone);
+ window_attributes.border_pixel = 0;
+ window_attributes.event_mask = StructureNotifyMask | ExposureMask;
+ window_ = XCreateWindow(display_, RootWindow(display_, vi->screen), 0, 0,
+ width_, height_, 0, vi->depth, InputOutput,
+ vi->visual, CWBorderPixel | CWColormap | CWEventMask,
+ &window_attributes);
+ XFree(vi);
+
+ XSetStandardProperties(display_, window_, window_title, window_title, None,
+ NULL, 0, NULL);
+
+ Atom wm_delete = XInternAtom(display_, "WM_DELETE_WINDOW", True);
+ if (wm_delete != None) {
+ XSetWMProtocols(display_, window_, &wm_delete, 1);
+ }
+
+ XMapRaised(display_, window_);
+
+ if (!glXMakeCurrent(display_, window_, context_)) {
+ Destroy();
+ return false;
+ }
+ GlRenderer::Init();
+ if (!glXMakeCurrent(display_, None, NULL)) {
+ Destroy();
+ return false;
+ }
+
+ Resize(width_, height_);
+ return true;
+}
+
+void GlxRenderer::Destroy() {
+ if (context_ != NULL) {
+ glXMakeCurrent(display_, window_, context_);
+ GlRenderer::Destroy();
+ glXMakeCurrent(display_, None, NULL);
+ glXDestroyContext(display_, context_);
+ context_ = NULL;
+ }
+
+ if (display_ != NULL) {
+ XCloseDisplay(display_);
+ display_ = NULL;
+ }
+}
+
+GlxRenderer* GlxRenderer::Create(const char* window_title,
+ size_t width,
+ size_t height) {
+ GlxRenderer* glx_renderer = new GlxRenderer(width, height);
+ if (!glx_renderer->Init(window_title)) {
+ // TODO(pbos): Add GLX-failed warning here?
+ delete glx_renderer;
+ return NULL;
+ }
+ return glx_renderer;
+}
+
+void GlxRenderer::Resize(size_t width, size_t height) {
+ width_ = width;
+ height_ = height;
+ if (!glXMakeCurrent(display_, window_, context_)) {
+ abort();
+ }
+ GlRenderer::ResizeViewport(width_, height_);
+ if (!glXMakeCurrent(display_, None, NULL)) {
+ abort();
+ }
+
+ XSizeHints* size_hints = XAllocSizeHints();
+ if (size_hints == NULL) {
+ abort();
+ }
+ size_hints->flags = PAspect;
+ size_hints->min_aspect.x = size_hints->max_aspect.x = width_;
+ size_hints->min_aspect.y = size_hints->max_aspect.y = height_;
+ XSetWMNormalHints(display_, window_, size_hints);
+ XFree(size_hints);
+
+ XWindowChanges wc;
+ wc.width = static_cast<int>(width);
+ wc.height = static_cast<int>(height);
+ XConfigureWindow(display_, window_, CWWidth | CWHeight, &wc);
+}
+
+void GlxRenderer::OnFrame(const webrtc::VideoFrame& frame) {
+ if (static_cast<size_t>(frame.width()) != width_ ||
+ static_cast<size_t>(frame.height()) != height_) {
+ Resize(static_cast<size_t>(frame.width()),
+ static_cast<size_t>(frame.height()));
+ }
+
+ XEvent event;
+ if (!glXMakeCurrent(display_, window_, context_)) {
+ abort();
+ }
+ while (XPending(display_)) {
+ XNextEvent(display_, &event);
+ switch (event.type) {
+ case ConfigureNotify:
+ GlRenderer::ResizeViewport(event.xconfigure.width,
+ event.xconfigure.height);
+ break;
+ default:
+ break;
+ }
+ }
+
+ GlRenderer::OnFrame(frame);
+ glXSwapBuffers(display_, window_);
+
+ if (!glXMakeCurrent(display_, None, NULL)) {
+ abort();
+ }
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/linux/glx_renderer.h b/third_party/libwebrtc/test/linux/glx_renderer.h
new file mode 100644
index 0000000000..8add60d964
--- /dev/null
+++ b/third_party/libwebrtc/test/linux/glx_renderer.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_LINUX_GLX_RENDERER_H_
+#define TEST_LINUX_GLX_RENDERER_H_
+
+#include <GL/glx.h>
+#include <X11/X.h>
+#include <X11/Xlib.h>
+#include <stddef.h>
+
+#include "api/video/video_frame.h"
+#include "test/gl/gl_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+class GlxRenderer : public GlRenderer {
+ public:
+ static GlxRenderer* Create(const char* window_title,
+ size_t width,
+ size_t height);
+ virtual ~GlxRenderer();
+
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+ GlxRenderer(size_t width, size_t height);
+
+ bool Init(const char* window_title);
+ void Resize(size_t width, size_t height);
+ void Destroy();
+
+ size_t width_, height_;
+
+ Display* display_;
+ Window window_;
+ GLXContext context_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_LINUX_GLX_RENDERER_H_
diff --git a/third_party/libwebrtc/test/linux/video_renderer_linux.cc b/third_party/libwebrtc/test/linux/video_renderer_linux.cc
new file mode 100644
index 0000000000..74c95235df
--- /dev/null
+++ b/third_party/libwebrtc/test/linux/video_renderer_linux.cc
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stddef.h>
+
+#include "test/linux/glx_renderer.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+VideoRenderer* VideoRenderer::CreatePlatformRenderer(const char* window_title,
+ size_t width,
+ size_t height) {
+ GlxRenderer* glx_renderer = GlxRenderer::Create(window_title, width, height);
+ if (glx_renderer != NULL) {
+ return glx_renderer;
+ }
+ return NULL;
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/logging/BUILD.gn b/third_party/libwebrtc/test/logging/BUILD.gn
new file mode 100644
index 0000000000..301c0e59c0
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_library("log_writer") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [
+ "file_log_writer.cc",
+ "file_log_writer.h",
+ "log_writer.cc",
+ "log_writer.h",
+ "memory_log_writer.cc",
+ "memory_log_writer.h",
+ ]
+
+ deps = [
+ "../../api:libjingle_logging_api",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:rtc_base_tests_utils",
+ "../../rtc_base:stringutils",
+ "../../test:fileutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
diff --git a/third_party/libwebrtc/test/logging/file_log_writer.cc b/third_party/libwebrtc/test/logging/file_log_writer.cc
new file mode 100644
index 0000000000..9189e1630d
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/file_log_writer.cc
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/logging/file_log_writer.h"
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace webrtc_impl {
+
+FileLogWriter::FileLogWriter(absl::string_view file_path)
+ : out_(std::fopen(std::string(file_path).c_str(), "wb")) {
+ RTC_CHECK(out_ != nullptr)
+ << "Failed to open file: '" << file_path << "' for writing.";
+}
+
+FileLogWriter::~FileLogWriter() {
+ std::fclose(out_);
+}
+
+bool FileLogWriter::IsActive() const {
+ return true;
+}
+
+bool FileLogWriter::Write(absl::string_view value) {
+ // We don't expect the write to fail. If it does, we don't want to risk
+ // silently ignoring it.
+ RTC_CHECK_EQ(std::fwrite(value.data(), 1, value.size(), out_), value.size())
+ << "fwrite failed unexpectedly: " << errno;
+ return true;
+}
+
+void FileLogWriter::Flush() {
+ RTC_CHECK_EQ(fflush(out_), 0) << "fflush failed unexpectedly: " << errno;
+}
+
+} // namespace webrtc_impl
+
+FileLogWriterFactory::FileLogWriterFactory(absl::string_view base_path)
+ : base_path_(base_path) {
+ for (size_t i = 0; i < base_path.size(); ++i) {
+ if (base_path[i] == '/')
+ test::CreateDir(base_path.substr(0, i));
+ }
+}
+
+FileLogWriterFactory::~FileLogWriterFactory() {}
+
+std::unique_ptr<RtcEventLogOutput> FileLogWriterFactory::Create(
+ absl::string_view filename) {
+ return std::make_unique<webrtc_impl::FileLogWriter>(base_path_ +
+ std::string(filename));
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/logging/file_log_writer.h b/third_party/libwebrtc/test/logging/file_log_writer.h
new file mode 100644
index 0000000000..c49b96ceff
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/file_log_writer.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_LOGGING_FILE_LOG_WRITER_H_
+#define TEST_LOGGING_FILE_LOG_WRITER_H_
+
+#include <cstdio>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "test/logging/log_writer.h"
+
+namespace webrtc {
+namespace webrtc_impl {
+class FileLogWriter final : public RtcEventLogOutput {
+ public:
+ explicit FileLogWriter(absl::string_view file_path);
+ ~FileLogWriter() final;
+ bool IsActive() const override;
+ bool Write(absl::string_view value) override;
+ void Flush() override;
+
+ private:
+ std::FILE* const out_;
+};
+} // namespace webrtc_impl
+class FileLogWriterFactory final : public LogWriterFactoryInterface {
+ public:
+ explicit FileLogWriterFactory(absl::string_view base_path);
+ ~FileLogWriterFactory() final;
+
+ std::unique_ptr<RtcEventLogOutput> Create(
+ absl::string_view filename) override;
+
+ private:
+ const std::string base_path_;
+ std::vector<std::unique_ptr<webrtc_impl::FileLogWriter>> writers_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_LOGGING_FILE_LOG_WRITER_H_
diff --git a/third_party/libwebrtc/test/logging/log_writer.cc b/third_party/libwebrtc/test/logging/log_writer.cc
new file mode 100644
index 0000000000..d9b8c1e68f
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/log_writer.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/logging/log_writer.h"
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+
+LogWriterFactoryAddPrefix::LogWriterFactoryAddPrefix(
+ LogWriterFactoryInterface* base,
+ absl::string_view prefix)
+ : base_factory_(base), prefix_(prefix) {}
+
+std::unique_ptr<RtcEventLogOutput> LogWriterFactoryAddPrefix::Create(
+ absl::string_view filename) {
+ return base_factory_->Create(prefix_ + std::string(filename));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/logging/log_writer.h b/third_party/libwebrtc/test/logging/log_writer.h
new file mode 100644
index 0000000000..335dab353f
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/log_writer.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_LOGGING_LOG_WRITER_H_
+#define TEST_LOGGING_LOG_WRITER_H_
+
+#include <stdarg.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/rtc_event_log_output.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+template <class... Args>
+inline void LogWriteFormat(RtcEventLogOutput* out_, const char* fmt, ...) {
+ va_list args, copy;
+ va_start(args, fmt);
+ va_copy(copy, args);
+ const int predicted_length = std::vsnprintf(nullptr, 0, fmt, copy);
+ va_end(copy);
+
+ RTC_DCHECK_GE(predicted_length, 0);
+ std::string out_str(predicted_length, '\0');
+ if (predicted_length > 0) {
+ // Pass "+ 1" to vsnprintf to include space for the '\0'.
+ const int actual_length =
+ std::vsnprintf(&out_str.front(), predicted_length + 1, fmt, args);
+ RTC_DCHECK_GE(actual_length, 0);
+ }
+ va_end(args);
+ out_->Write(out_str);
+}
+
+class LogWriterFactoryInterface {
+ public:
+ virtual std::unique_ptr<RtcEventLogOutput> Create(
+ absl::string_view filename) = 0;
+ virtual ~LogWriterFactoryInterface() = default;
+};
+
+class LogWriterFactoryAddPrefix : public LogWriterFactoryInterface {
+ public:
+ LogWriterFactoryAddPrefix(LogWriterFactoryInterface* base,
+ absl::string_view prefix);
+ std::unique_ptr<RtcEventLogOutput> Create(
+ absl::string_view filename) override;
+
+ private:
+ LogWriterFactoryInterface* const base_factory_;
+ const std::string prefix_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_LOGGING_LOG_WRITER_H_
diff --git a/third_party/libwebrtc/test/logging/memory_log_writer.cc b/third_party/libwebrtc/test/logging/memory_log_writer.cc
new file mode 100644
index 0000000000..eae9223c77
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/memory_log_writer.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/logging/memory_log_writer.h"
+
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+class MemoryLogWriter final : public RtcEventLogOutput {
+ public:
+ explicit MemoryLogWriter(std::map<std::string, std::string>* target,
+ absl::string_view filename)
+ : target_(target), filename_(filename) {}
+ ~MemoryLogWriter() final { target_->insert({filename_, std::move(buffer_)}); }
+ bool IsActive() const override { return true; }
+ bool Write(absl::string_view value) override {
+ buffer_.append(value.data(), value.size());
+ return true;
+ }
+ void Flush() override {}
+
+ private:
+ std::map<std::string, std::string>* const target_;
+ const std::string filename_;
+ std::string buffer_;
+};
+
+class MemoryLogWriterFactory final : public LogWriterFactoryInterface {
+ public:
+ explicit MemoryLogWriterFactory(std::map<std::string, std::string>* target)
+ : target_(target) {}
+ ~MemoryLogWriterFactory() override {}
+ std::unique_ptr<RtcEventLogOutput> Create(
+ absl::string_view filename) override {
+ return std::make_unique<MemoryLogWriter>(target_, filename);
+ }
+
+ private:
+ std::map<std::string, std::string>* const target_;
+};
+
+} // namespace
+
+MemoryLogStorage::MemoryLogStorage() {}
+
+MemoryLogStorage::~MemoryLogStorage() {}
+
+std::unique_ptr<LogWriterFactoryInterface> MemoryLogStorage::CreateFactory() {
+ return std::make_unique<MemoryLogWriterFactory>(&logs_);
+}
+
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/logging/memory_log_writer.h b/third_party/libwebrtc/test/logging/memory_log_writer.h
new file mode 100644
index 0000000000..e795b2fd10
--- /dev/null
+++ b/third_party/libwebrtc/test/logging/memory_log_writer.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_LOGGING_MEMORY_LOG_WRITER_H_
+#define TEST_LOGGING_MEMORY_LOG_WRITER_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "test/logging/log_writer.h"
+
+namespace webrtc {
+
+// Allows creating log writer factories that create log writers that save
+// their content to memory. When the log writers are destroyed, their content is
+// saved to the logs_ member of this class. The intended usage is to keep this
+// class alive after the created factories and writers have been destroyed and
+// then use logs() to access all the saved logs.
+class MemoryLogStorage {
+ public:
+ MemoryLogStorage();
+ ~MemoryLogStorage();
+ std::unique_ptr<LogWriterFactoryInterface> CreateFactory();
+ const std::map<std::string, std::string>& logs() { return logs_; }
+
+ private:
+ std::map<std::string, std::string> logs_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_LOGGING_MEMORY_LOG_WRITER_H_
diff --git a/third_party/libwebrtc/test/mac/Info.plist b/third_party/libwebrtc/test/mac/Info.plist
new file mode 100644
index 0000000000..8a2b5cf0a0
--- /dev/null
+++ b/third_party/libwebrtc/test/mac/Info.plist
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>CFBundleIdentifier</key>
+ <string>org.webrtc.video_loopback</string>
+ <key>CFBundleName</key>
+ <string>video_loopback</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>NSCameraUsageDescription</key>
+ <string>Camera access needed for video calling</string>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Microphone access needed for video calling</string>
+</dict>
+</plist>
diff --git a/third_party/libwebrtc/test/mac/run_test.mm b/third_party/libwebrtc/test/mac/run_test.mm
new file mode 100644
index 0000000000..38c6c8f8c1
--- /dev/null
+++ b/third_party/libwebrtc/test/mac/run_test.mm
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+
+#include "test/run_test.h"
+
+// Converting a C++ function pointer to an Objective-C block.
+typedef void(^TestBlock)();
+TestBlock functionToBlock(void(*function)()) {
+ return [^(void) { function(); } copy];
+}
+
+// Class calling the test function on the platform specific thread.
+@interface TestRunner : NSObject {
+ BOOL running_;
+}
+- (void)runAllTests:(TestBlock)ignored;
+- (BOOL)running;
+@end
+
+@implementation TestRunner
+- (id)init {
+ self = [super init];
+ if (self) {
+ running_ = YES;
+ }
+ return self;
+}
+
+- (void)runAllTests:(TestBlock)testBlock {
+ @autoreleasepool {
+ testBlock();
+ running_ = NO;
+ }
+}
+
+- (BOOL)running {
+ return running_;
+}
+@end
+
+namespace webrtc {
+namespace test {
+
+void RunTest(void(*test)()) {
+ @autoreleasepool {
+ [NSApplication sharedApplication];
+
+ // Convert the function pointer to an Objective-C block and call on a
+ // separate thread, to avoid blocking the main thread.
+ TestRunner *testRunner = [[TestRunner alloc] init];
+ TestBlock testBlock = functionToBlock(test);
+ [NSThread detachNewThreadSelector:@selector(runAllTests:)
+ toTarget:testRunner
+ withObject:testBlock];
+
+ NSRunLoop *runLoop = [NSRunLoop currentRunLoop];
+ while ([testRunner running] && [runLoop runMode:NSDefaultRunLoopMode
+ beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]])
+ ;
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mac/video_renderer_mac.h b/third_party/libwebrtc/test/mac/video_renderer_mac.h
new file mode 100644
index 0000000000..8e629b0a49
--- /dev/null
+++ b/third_party/libwebrtc/test/mac/video_renderer_mac.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MAC_VIDEO_RENDERER_MAC_H_
+#define TEST_MAC_VIDEO_RENDERER_MAC_H_
+
+#include "test/gl/gl_renderer.h"
+
+@class CocoaWindow;
+
+namespace webrtc {
+namespace test {
+
+class MacRenderer : public GlRenderer {
+ public:
+ MacRenderer();
+ virtual ~MacRenderer();
+
+ MacRenderer(const MacRenderer&) = delete;
+ MacRenderer& operator=(const MacRenderer&) = delete;
+
+ bool Init(const char* window_title, int width, int height);
+
+ // Implements GlRenderer.
+ void OnFrame(const VideoFrame& frame) override;
+
+ private:
+ CocoaWindow* window_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_MAC_VIDEO_RENDERER_MAC_H_
diff --git a/third_party/libwebrtc/test/mac/video_renderer_mac.mm b/third_party/libwebrtc/test/mac/video_renderer_mac.mm
new file mode 100644
index 0000000000..7103375383
--- /dev/null
+++ b/third_party/libwebrtc/test/mac/video_renderer_mac.mm
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mac/video_renderer_mac.h"
+
+#import <Cocoa/Cocoa.h>
+
+// Creates a Cocoa Window with an OpenGL context, used together with an OpenGL
+// renderer.
+@interface CocoaWindow : NSObject {
+ @private
+ NSWindow *window_;
+ NSOpenGLContext *context_;
+ NSString *title_;
+ int width_;
+ int height_;
+}
+
+- (id)initWithTitle:(NSString *)title width:(int)width height:(int)height;
+// 'createWindow' must be called on the main thread.
+- (void)createWindow:(NSObject *)ignored;
+- (void)makeCurrentContext;
+
+@end
+
+@implementation CocoaWindow
+ static NSInteger nextXOrigin_;
+ static NSInteger nextYOrigin_;
+
+- (id)initWithTitle:(NSString *)title width:(int)width height:(int)height {
+ if (self = [super init]) {
+ title_ = title;
+ width_ = width;
+ height_ = height;
+ }
+ return self;
+}
+
+- (void)createWindow:(NSObject *)ignored {
+ NSInteger xOrigin = nextXOrigin_;
+ NSRect screenFrame = [[NSScreen mainScreen] frame];
+ if (nextXOrigin_ + width_ < screenFrame.size.width) {
+ nextXOrigin_ += width_;
+ } else {
+ xOrigin = 0;
+ nextXOrigin_ = 0;
+ nextYOrigin_ += height_;
+ }
+ if (nextYOrigin_ + height_ > screenFrame.size.height) {
+ xOrigin = 0;
+ nextXOrigin_ = 0;
+ nextYOrigin_ = 0;
+ }
+ NSInteger yOrigin = nextYOrigin_;
+ NSRect windowFrame = NSMakeRect(xOrigin, yOrigin, width_, height_);
+ window_ = [[NSWindow alloc] initWithContentRect:windowFrame
+ styleMask:NSWindowStyleMaskTitled
+ backing:NSBackingStoreBuffered
+ defer:NO];
+
+ NSRect viewFrame = NSMakeRect(0, 0, width_, height_);
+ NSOpenGLView *view = [[NSOpenGLView alloc] initWithFrame:viewFrame pixelFormat:nil];
+ context_ = [view openGLContext];
+
+ [[window_ contentView] addSubview:view];
+ [window_ setTitle:title_];
+ [window_ makeKeyAndOrderFront:NSApp];
+}
+
+- (void)makeCurrentContext {
+ [context_ makeCurrentContext];
+}
+
+@end
+
+namespace webrtc {
+namespace test {
+
+VideoRenderer* VideoRenderer::CreatePlatformRenderer(const char* window_title,
+ size_t width,
+ size_t height) {
+ MacRenderer* renderer = new MacRenderer();
+ if (!renderer->Init(window_title, width, height)) {
+ delete renderer;
+ return NULL;
+ }
+ return renderer;
+}
+
+MacRenderer::MacRenderer()
+ : window_(NULL) {}
+
+MacRenderer::~MacRenderer() {
+ GlRenderer::Destroy();
+}
+
+bool MacRenderer::Init(const char* window_title, int width, int height) {
+ window_ = [[CocoaWindow alloc]
+ initWithTitle:[NSString stringWithUTF8String:window_title]
+ width:width
+ height:height];
+ if (!window_)
+ return false;
+ [window_ performSelectorOnMainThread:@selector(createWindow:)
+ withObject:nil
+ waitUntilDone:YES];
+
+ [window_ makeCurrentContext];
+ GlRenderer::Init();
+ GlRenderer::ResizeViewport(width, height);
+ return true;
+}
+
+void MacRenderer::OnFrame(const VideoFrame& frame) {
+ [window_ makeCurrentContext];
+ GlRenderer::OnFrame(frame);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mac_capturer.h b/third_party/libwebrtc/test/mac_capturer.h
new file mode 100644
index 0000000000..3d7ee77b45
--- /dev/null
+++ b/third_party/libwebrtc/test/mac_capturer.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_MAC_CAPTURER_H_
+#define TEST_MAC_CAPTURER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/media_stream_interface.h"
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/video_capture.h"
+#include "rtc_base/thread.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace test {
+
+class MacCapturer : public TestVideoCapturer,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ static MacCapturer* Create(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index);
+ ~MacCapturer() override;
+
+ void OnFrame(const VideoFrame& frame) override;
+
+ private:
+ MacCapturer(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index);
+ void Destroy();
+
+ void* capturer_;
+ void* adapter_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_MAC_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/mac_capturer.mm b/third_party/libwebrtc/test/mac_capturer.mm
new file mode 100644
index 0000000000..da8e9b76b6
--- /dev/null
+++ b/third_party/libwebrtc/test/mac_capturer.mm
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mac_capturer.h"
+
+#import "sdk/objc/base/RTCVideoCapturer.h"
+#import "sdk/objc/components/capturer/RTCCameraVideoCapturer.h"
+#import "sdk/objc/native/api/video_capturer.h"
+#import "sdk/objc/native/src/objc_frame_buffer.h"
+
+@interface RTCTestVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
+@property(nonatomic) webrtc::test::MacCapturer *capturer;
+@end
+
+@implementation RTCTestVideoSourceAdapter
+@synthesize capturer = _capturer;
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+ didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+ const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(frame.buffer);
+ _capturer->OnFrame(webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_us(timestamp_us)
+ .build());
+}
+
+@end
+
+namespace {
+
+AVCaptureDeviceFormat *SelectClosestFormat(AVCaptureDevice *device, size_t width, size_t height) {
+ NSArray<AVCaptureDeviceFormat *> *formats =
+ [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
+ AVCaptureDeviceFormat *selectedFormat = nil;
+ int currentDiff = INT_MAX;
+ for (AVCaptureDeviceFormat *format in formats) {
+ CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+ int diff =
+ std::abs((int64_t)width - dimension.width) + std::abs((int64_t)height - dimension.height);
+ if (diff < currentDiff) {
+ selectedFormat = format;
+ currentDiff = diff;
+ }
+ }
+ return selectedFormat;
+}
+
+} // namespace
+
+namespace webrtc {
+namespace test {
+
+MacCapturer::MacCapturer(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index) {
+ RTCTestVideoSourceAdapter *adapter = [[RTCTestVideoSourceAdapter alloc] init];
+ adapter_ = (__bridge_retained void *)adapter;
+ adapter.capturer = this;
+
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:adapter];
+ capturer_ = (__bridge_retained void *)capturer;
+
+ AVCaptureDevice *device =
+ [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices] objectAtIndex:capture_device_index];
+ AVCaptureDeviceFormat *format = SelectClosestFormat(device, width, height);
+ [capturer startCaptureWithDevice:device format:format fps:target_fps];
+}
+
+MacCapturer *MacCapturer::Create(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index) {
+ return new MacCapturer(width, height, target_fps, capture_device_index);
+}
+
+void MacCapturer::Destroy() {
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-variable"
+ RTCTestVideoSourceAdapter *adapter = (__bridge_transfer RTCTestVideoSourceAdapter *)adapter_;
+ RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
+ (__bridge_transfer RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer_;
+ [capturer stopCapture];
+#pragma clang diagnostic pop
+}
+
+MacCapturer::~MacCapturer() {
+ Destroy();
+}
+
+void MacCapturer::OnFrame(const VideoFrame &frame) {
+ TestVideoCapturer::OnFrame(frame);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mappable_native_buffer.cc b/third_party/libwebrtc/test/mappable_native_buffer.cc
new file mode 100644
index 0000000000..1b171e604b
--- /dev/null
+++ b/third_party/libwebrtc/test/mappable_native_buffer.cc
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mappable_native_buffer.h"
+
+#include "absl/algorithm/container.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_rotation.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+class NV12BufferWithDidConvertToI420 : public NV12Buffer {
+ public:
+ NV12BufferWithDidConvertToI420(int width, int height)
+ : NV12Buffer(width, height), did_convert_to_i420_(false) {}
+
+ bool did_convert_to_i420() const { return did_convert_to_i420_; }
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override {
+ did_convert_to_i420_ = true;
+ return NV12Buffer::ToI420();
+ }
+
+ private:
+ bool did_convert_to_i420_;
+};
+
+} // namespace
+
+VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms,
+ VideoFrameBuffer::Type mappable_type,
+ int width,
+ int height) {
+ VideoFrame frame =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(rtc::make_ref_counted<MappableNativeBuffer>(
+ mappable_type, width, height))
+ .set_timestamp_rtp(99)
+ .set_timestamp_ms(99)
+ .set_rotation(kVideoRotation_0)
+ .build();
+ frame.set_ntp_time_ms(ntp_time_ms);
+ return frame;
+}
+
+rtc::scoped_refptr<MappableNativeBuffer> GetMappableNativeBufferFromVideoFrame(
+ const VideoFrame& frame) {
+ return rtc::scoped_refptr<MappableNativeBuffer>(
+ static_cast<MappableNativeBuffer*>(frame.video_frame_buffer().get()));
+}
+
+MappableNativeBuffer::ScaledBuffer::ScaledBuffer(
+ rtc::scoped_refptr<MappableNativeBuffer> parent,
+ int width,
+ int height)
+ : parent_(std::move(parent)), width_(width), height_(height) {}
+
+MappableNativeBuffer::ScaledBuffer::~ScaledBuffer() {}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::ScaledBuffer::CropAndScale(int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ return rtc::make_ref_counted<ScaledBuffer>(parent_, scaled_width,
+ scaled_height);
+}
+
+rtc::scoped_refptr<I420BufferInterface>
+MappableNativeBuffer::ScaledBuffer::ToI420() {
+ return parent_->GetOrCreateMappedBuffer(width_, height_)->ToI420();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::ScaledBuffer::GetMappedFrameBuffer(
+ rtc::ArrayView<VideoFrameBuffer::Type> types) {
+ if (absl::c_find(types, parent_->mappable_type_) == types.end())
+ return nullptr;
+ return parent_->GetOrCreateMappedBuffer(width_, height_);
+}
+
+MappableNativeBuffer::MappableNativeBuffer(VideoFrameBuffer::Type mappable_type,
+ int width,
+ int height)
+ : mappable_type_(mappable_type), width_(width), height_(height) {
+ RTC_DCHECK(mappable_type_ == VideoFrameBuffer::Type::kI420 ||
+ mappable_type_ == VideoFrameBuffer::Type::kNV12);
+}
+
+MappableNativeBuffer::~MappableNativeBuffer() {}
+
+rtc::scoped_refptr<VideoFrameBuffer> MappableNativeBuffer::CropAndScale(
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ return FullSizeBuffer()->CropAndScale(
+ offset_x, offset_y, crop_width, crop_height, scaled_width, scaled_height);
+}
+
+rtc::scoped_refptr<I420BufferInterface> MappableNativeBuffer::ToI420() {
+ return FullSizeBuffer()->ToI420();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> MappableNativeBuffer::GetMappedFrameBuffer(
+ rtc::ArrayView<VideoFrameBuffer::Type> types) {
+ return FullSizeBuffer()->GetMappedFrameBuffer(types);
+}
+
+std::vector<rtc::scoped_refptr<VideoFrameBuffer>>
+MappableNativeBuffer::GetMappedFramedBuffers() const {
+ MutexLock lock(&lock_);
+ return mapped_buffers_;
+}
+
+bool MappableNativeBuffer::DidConvertToI420() const {
+ if (mappable_type_ != VideoFrameBuffer::Type::kNV12)
+ return false;
+ MutexLock lock(&lock_);
+ for (auto& mapped_buffer : mapped_buffers_) {
+ if (static_cast<NV12BufferWithDidConvertToI420*>(mapped_buffer.get())
+ ->did_convert_to_i420()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+rtc::scoped_refptr<MappableNativeBuffer::ScaledBuffer>
+MappableNativeBuffer::FullSizeBuffer() {
+ return rtc::make_ref_counted<ScaledBuffer>(
+ rtc::scoped_refptr<MappableNativeBuffer>(this), width_, height_);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+MappableNativeBuffer::GetOrCreateMappedBuffer(int width, int height) {
+ MutexLock lock(&lock_);
+ for (auto& mapped_buffer : mapped_buffers_) {
+ if (mapped_buffer->width() == width && mapped_buffer->height() == height) {
+ return mapped_buffer;
+ }
+ }
+ rtc::scoped_refptr<VideoFrameBuffer> mapped_buffer;
+ switch (mappable_type_) {
+ case VideoFrameBuffer::Type::kI420: {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width, height);
+ I420Buffer::SetBlack(i420_buffer.get());
+ mapped_buffer = i420_buffer;
+ break;
+ }
+ case VideoFrameBuffer::Type::kNV12: {
+ auto nv12_buffer =
+ rtc::make_ref_counted<NV12BufferWithDidConvertToI420>(width, height);
+ nv12_buffer->InitializeData();
+ mapped_buffer = std::move(nv12_buffer);
+ break;
+ }
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+ mapped_buffers_.push_back(mapped_buffer);
+ return mapped_buffer;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mappable_native_buffer.h b/third_party/libwebrtc/test/mappable_native_buffer.h
new file mode 100644
index 0000000000..08f155e07f
--- /dev/null
+++ b/third_party/libwebrtc/test/mappable_native_buffer.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MAPPABLE_NATIVE_BUFFER_H_
+#define TEST_MAPPABLE_NATIVE_BUFFER_H_
+
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/video/video_frame.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace test {
+
+class MappableNativeBuffer;
+
+VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms,
+ VideoFrameBuffer::Type mappable_type,
+ int width,
+ int height);
+
+rtc::scoped_refptr<MappableNativeBuffer> GetMappableNativeBufferFromVideoFrame(
+ const VideoFrame& frame);
+
+// A for-testing native buffer that is scalable and mappable. The contents of
+// the buffer is black and the pixels are created upon mapping. Mapped buffers
+// are stored inside MappableNativeBuffer, allowing tests to verify which
+// resolutions were mapped, e.g. when passing them in to an encoder or other
+// modules.
+class MappableNativeBuffer : public VideoFrameBuffer {
+ public:
+  // Creates a buffer whose mapped representation has type `mappable_type`
+  // (kI420 or kNV12) and the given full-size resolution.
+  MappableNativeBuffer(VideoFrameBuffer::Type mappable_type,
+                       int width,
+                       int height);
+  ~MappableNativeBuffer() override;
+
+  VideoFrameBuffer::Type mappable_type() const { return mappable_type_; }
+
+  VideoFrameBuffer::Type type() const override { return Type::kNative; }
+  int width() const override { return width_; }
+  int height() const override { return height_; }
+
+  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+                                                    int offset_y,
+                                                    int crop_width,
+                                                    int crop_height,
+                                                    int scaled_width,
+                                                    int scaled_height) override;
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+  rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
+      rtc::ArrayView<VideoFrameBuffer::Type> types) override;
+
+  // Gets all the buffers that have been mapped so far, including mappings of
+  // cropped and scaled buffers.
+  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> GetMappedFramedBuffers()
+      const;  // ("Framed" sic -- the name is part of the existing API.)
+  bool DidConvertToI420() const;
+
+ private:
+  friend class rtc::RefCountedObject<MappableNativeBuffer>;
+
+  class ScaledBuffer : public VideoFrameBuffer {
+   public:
+    ScaledBuffer(rtc::scoped_refptr<MappableNativeBuffer> parent,
+                 int width,
+                 int height);
+    ~ScaledBuffer() override;
+
+    VideoFrameBuffer::Type type() const override { return Type::kNative; }
+    int width() const override { return width_; }
+    int height() const override { return height_; }
+
+    rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(
+        int offset_x,
+        int offset_y,
+        int crop_width,
+        int crop_height,
+        int scaled_width,
+        int scaled_height) override;
+
+    rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+    rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
+        rtc::ArrayView<VideoFrameBuffer::Type> types) override;
+
+   private:
+    friend class rtc::RefCountedObject<ScaledBuffer>;
+
+    const rtc::scoped_refptr<MappableNativeBuffer> parent_;
+    const int width_;
+    const int height_;
+  };
+
+  rtc::scoped_refptr<ScaledBuffer> FullSizeBuffer();
+  rtc::scoped_refptr<VideoFrameBuffer> GetOrCreateMappedBuffer(int width,
+                                                               int height);
+
+  const VideoFrameBuffer::Type mappable_type_;
+  const int width_;
+  const int height_;
+  mutable Mutex lock_;  // mutable: locked from const accessors.
+  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_buffers_
+      RTC_GUARDED_BY(&lock_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_MAPPABLE_NATIVE_BUFFER_H_
diff --git a/third_party/libwebrtc/test/mock_audio_decoder.cc b/third_party/libwebrtc/test/mock_audio_decoder.cc
new file mode 100644
index 0000000000..5af9f370cd
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_decoder.cc
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mock_audio_decoder.h"
+
+namespace webrtc {
+
+MockAudioDecoder::MockAudioDecoder() = default;
+MockAudioDecoder::~MockAudioDecoder() {
+  Die();  // Lets tests EXPECT_CALL(decoder, Die()) to observe destruction.
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mock_audio_decoder.h b/third_party/libwebrtc/test/mock_audio_decoder.h
new file mode 100644
index 0000000000..8f44bf891d
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_decoder.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_AUDIO_DECODER_H_
+#define TEST_MOCK_AUDIO_DECODER_H_
+
+#include "api/audio_codecs/audio_decoder.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockAudioDecoder : public AudioDecoder {
+ public:
+  MockAudioDecoder();
+  ~MockAudioDecoder();  // NOTE(review): calls Die(); not marked `override` -- confirm base dtor is virtual.
+  MOCK_METHOD(void, Die, ());  // Not part of AudioDecoder; invoked from the destructor.
+  MOCK_METHOD(int,
+              DecodeInternal,
+              (const uint8_t*, size_t, int, int16_t*, SpeechType*),
+              (override));
+  MOCK_METHOD(bool, HasDecodePlc, (), (const, override));
+  MOCK_METHOD(size_t, DecodePlc, (size_t, int16_t*), (override));
+  MOCK_METHOD(void, Reset, (), (override));
+  MOCK_METHOD(int, ErrorCode, (), (override));
+  MOCK_METHOD(int, PacketDuration, (const uint8_t*, size_t), (const, override));
+  MOCK_METHOD(size_t, Channels, (), (const, override));
+  MOCK_METHOD(int, SampleRateHz, (), (const, override));
+};
+
+} // namespace webrtc
+#endif // TEST_MOCK_AUDIO_DECODER_H_
diff --git a/third_party/libwebrtc/test/mock_audio_decoder_factory.h b/third_party/libwebrtc/test/mock_audio_decoder_factory.h
new file mode 100644
index 0000000000..425ea38f9c
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_decoder_factory.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_AUDIO_DECODER_FACTORY_H_
+#define TEST_MOCK_AUDIO_DECODER_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/make_ref_counted.h"
+#include "api/scoped_refptr.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockAudioDecoderFactory : public AudioDecoderFactory {  // gMock implementation of AudioDecoderFactory.
+ public:
+  MOCK_METHOD(std::vector<AudioCodecSpec>,
+              GetSupportedDecoders,
+              (),
+              (override));
+  MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), (override));
+  std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+      const SdpAudioFormat& format,
+      absl::optional<AudioCodecPairId> codec_pair_id) override {
+    std::unique_ptr<AudioDecoder> return_value;
+    MakeAudioDecoderMock(format, codec_pair_id, &return_value);  // Out-param: the real method returns a move-only type.
+    return return_value;
+  }
+  MOCK_METHOD(void,
+              MakeAudioDecoderMock,
+              (const SdpAudioFormat& format,
+               absl::optional<AudioCodecPairId> codec_pair_id,
+               std::unique_ptr<AudioDecoder>*));
+
+  // Creates a MockAudioDecoderFactory with no formats and that may not be
+  // invoked to create a codec - useful for initializing a voice engine, for
+  // example.
+  static rtc::scoped_refptr<webrtc::MockAudioDecoderFactory>
+  CreateUnusedFactory() {
+    using ::testing::_;
+    using ::testing::AnyNumber;
+    using ::testing::Return;
+
+    rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> factory =
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
+    ON_CALL(*factory.get(), GetSupportedDecoders())
+        .WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
+    EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber());
+    ON_CALL(*factory, IsSupportedDecoder(_)).WillByDefault(Return(false));
+    EXPECT_CALL(*factory, IsSupportedDecoder(_)).Times(AnyNumber());
+    EXPECT_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _)).Times(0);
+    return factory;
+  }
+
+  // Creates a MockAudioDecoderFactory with no formats that may be invoked to
+  // create a codec any number of times. It will, though, return nullptr on each
+  // call, since it supports no codecs.
+  static rtc::scoped_refptr<webrtc::MockAudioDecoderFactory>
+  CreateEmptyFactory() {
+    using ::testing::_;
+    using ::testing::AnyNumber;
+    using ::testing::Return;
+    using ::testing::SetArgPointee;
+
+    rtc::scoped_refptr<webrtc::MockAudioDecoderFactory> factory =
+        rtc::make_ref_counted<webrtc::MockAudioDecoderFactory>();
+    ON_CALL(*factory.get(), GetSupportedDecoders())
+        .WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
+    EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber());
+    ON_CALL(*factory, IsSupportedDecoder(_)).WillByDefault(Return(false));
+    EXPECT_CALL(*factory, IsSupportedDecoder(_)).Times(AnyNumber());
+    ON_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _))
+        .WillByDefault(SetArgPointee<2>(nullptr));
+    EXPECT_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _))
+        .Times(AnyNumber());
+    return factory;
+  }
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_AUDIO_DECODER_FACTORY_H_
diff --git a/third_party/libwebrtc/test/mock_audio_encoder.cc b/third_party/libwebrtc/test/mock_audio_encoder.cc
new file mode 100644
index 0000000000..36615111a5
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_encoder.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mock_audio_encoder.h"
+
+namespace webrtc {
+
+MockAudioEncoder::MockAudioEncoder() = default;
+MockAudioEncoder::~MockAudioEncoder() = default;
+
+MockAudioEncoder::FakeEncoding::FakeEncoding(
+    const AudioEncoder::EncodedInfo& info)
+    : info_(info) {}
+
+MockAudioEncoder::FakeEncoding::FakeEncoding(size_t encoded_bytes) {
+  info_.encoded_bytes = encoded_bytes;
+}
+
+AudioEncoder::EncodedInfo MockAudioEncoder::FakeEncoding::operator()(
+    uint32_t timestamp,
+    rtc::ArrayView<const int16_t> audio,
+    rtc::Buffer* encoded) {
+  encoded->SetSize(encoded->size() + info_.encoded_bytes);  // Grows the buffer; bytes are left unwritten.
+  return info_;
+}
+
+MockAudioEncoder::CopyEncoding::~CopyEncoding() = default;
+
+MockAudioEncoder::CopyEncoding::CopyEncoding(
+    AudioEncoder::EncodedInfo info,
+    rtc::ArrayView<const uint8_t> payload)
+    : info_(info), payload_(payload) {}
+
+MockAudioEncoder::CopyEncoding::CopyEncoding(
+    rtc::ArrayView<const uint8_t> payload)
+    : payload_(payload) {
+  info_.encoded_bytes = payload_.size();  // Default: advertise the whole payload.
+}
+
+AudioEncoder::EncodedInfo MockAudioEncoder::CopyEncoding::operator()(
+    uint32_t timestamp,
+    rtc::ArrayView<const int16_t> audio,
+    rtc::Buffer* encoded) {
+  RTC_CHECK(encoded);
+  RTC_CHECK_LE(info_.encoded_bytes, payload_.size());  // Borrowed payload must cover the advertised size.
+  encoded->AppendData(payload_.data(), info_.encoded_bytes);
+  return info_;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mock_audio_encoder.h b/third_party/libwebrtc/test/mock_audio_encoder.h
new file mode 100644
index 0000000000..1f4510e885
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_encoder.h
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_AUDIO_ENCODER_H_
+#define TEST_MOCK_AUDIO_ENCODER_H_
+
+#include <string>
+
+#include "api/array_view.h"
+#include "api/audio_codecs/audio_encoder.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockAudioEncoder : public AudioEncoder {
+ public:
+  MockAudioEncoder();
+  ~MockAudioEncoder();  // NOTE(review): not marked `override` -- confirm base dtor is virtual.
+  MOCK_METHOD(int, SampleRateHz, (), (const, override));
+  MOCK_METHOD(size_t, NumChannels, (), (const, override));
+  MOCK_METHOD(int, RtpTimestampRateHz, (), (const, override));
+  MOCK_METHOD(size_t, Num10MsFramesInNextPacket, (), (const, override));
+  MOCK_METHOD(size_t, Max10MsFramesInAPacket, (), (const, override));
+  MOCK_METHOD(int, GetTargetBitrate, (), (const, override));
+  MOCK_METHOD((absl::optional<std::pair<TimeDelta, TimeDelta>>),  // Extra parens shield the commas in the type.
+              GetFrameLengthRange,
+              (),
+              (const, override));
+
+  MOCK_METHOD(void, Reset, (), (override));
+  MOCK_METHOD(bool, SetFec, (bool enable), (override));
+  MOCK_METHOD(bool, SetDtx, (bool enable), (override));
+  MOCK_METHOD(bool, SetApplication, (Application application), (override));
+  MOCK_METHOD(void, SetMaxPlaybackRate, (int frequency_hz), (override));
+  MOCK_METHOD(void,
+              OnReceivedUplinkBandwidth,
+              (int target_audio_bitrate_bps,
+               absl::optional<int64_t> probing_interval_ms),
+              (override));
+  MOCK_METHOD(void,
+              OnReceivedUplinkPacketLossFraction,
+              (float uplink_packet_loss_fraction),
+              (override));
+  MOCK_METHOD(void,
+              OnReceivedOverhead,
+              (size_t overhead_bytes_per_packet),
+              (override));
+
+  MOCK_METHOD(bool,
+              EnableAudioNetworkAdaptor,
+              (const std::string& config_string, RtcEventLog*),
+              (override));
+
+  // Note, we explicitly chose not to create a mock for the Encode method.
+  MOCK_METHOD(EncodedInfo,
+              EncodeImpl,
+              (uint32_t timestamp,
+               rtc::ArrayView<const int16_t> audio,
+               rtc::Buffer*),
+              (override));
+
+  class FakeEncoding {
+   public:
+    // Creates a functor that will return `info` and adjust the rtc::Buffer
+    // given as input to it, so it is info.encoded_bytes larger.
+    explicit FakeEncoding(const AudioEncoder::EncodedInfo& info);
+
+    // Shorthand version of the constructor above, for when only setting
+    // encoded_bytes in the EncodedInfo object matters.
+    explicit FakeEncoding(size_t encoded_bytes);
+
+    AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
+                                         rtc::ArrayView<const int16_t> audio,
+                                         rtc::Buffer* encoded);
+
+   private:
+    AudioEncoder::EncodedInfo info_;
+  };
+
+  class CopyEncoding {
+   public:
+    ~CopyEncoding();
+
+    // Creates a functor that will return `info` and append the data in the
+    // payload to the buffer given as input to it. Up to info.encoded_bytes are
+    // appended - make sure the payload is big enough! Since it uses an
+    // ArrayView, it _does not_ copy the payload. Make sure it doesn't fall out
+    // of scope!
+    CopyEncoding(AudioEncoder::EncodedInfo info,
+                 rtc::ArrayView<const uint8_t> payload);
+
+    // Shorthand version of the constructor above, for when you wish to append
+    // the whole payload and do not care about any EncodedInfo attribute other
+    // than encoded_bytes.
+    explicit CopyEncoding(rtc::ArrayView<const uint8_t> payload);
+
+    AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
+                                         rtc::ArrayView<const int16_t> audio,
+                                         rtc::Buffer* encoded);
+
+   private:
+    AudioEncoder::EncodedInfo info_;
+    rtc::ArrayView<const uint8_t> payload_;
+  };
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_AUDIO_ENCODER_H_
diff --git a/third_party/libwebrtc/test/mock_audio_encoder_factory.h b/third_party/libwebrtc/test/mock_audio_encoder_factory.h
new file mode 100644
index 0000000000..eaa5b8f17d
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_audio_encoder_factory.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_AUDIO_ENCODER_FACTORY_H_
+#define TEST_MOCK_AUDIO_ENCODER_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/make_ref_counted.h"
+#include "api/scoped_refptr.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockAudioEncoderFactory
+    : public ::testing::NiceMock<AudioEncoderFactory> {  // NiceMock base: uninteresting calls stay silent.
+ public:
+  MOCK_METHOD(std::vector<AudioCodecSpec>,
+              GetSupportedEncoders,
+              (),
+              (override));
+  MOCK_METHOD(absl::optional<AudioCodecInfo>,
+              QueryAudioEncoder,
+              (const SdpAudioFormat& format),
+              (override));
+
+  std::unique_ptr<AudioEncoder> MakeAudioEncoder(
+      int payload_type,
+      const SdpAudioFormat& format,
+      absl::optional<AudioCodecPairId> codec_pair_id) override {
+    std::unique_ptr<AudioEncoder> return_value;
+    MakeAudioEncoderMock(payload_type, format, codec_pair_id, &return_value);  // Out-param: move-only return type.
+    return return_value;
+  }
+  MOCK_METHOD(void,
+              MakeAudioEncoderMock,
+              (int payload_type,
+               const SdpAudioFormat& format,
+               absl::optional<AudioCodecPairId> codec_pair_id,
+               std::unique_ptr<AudioEncoder>*));
+
+  // Creates a MockAudioEncoderFactory with no formats and that may not be
+  // invoked to create a codec - useful for initializing a voice engine, for
+  // example.
+  static rtc::scoped_refptr<webrtc::MockAudioEncoderFactory>
+  CreateUnusedFactory() {
+    using ::testing::_;
+    using ::testing::AnyNumber;
+    using ::testing::Return;
+
+    auto factory = rtc::make_ref_counted<webrtc::MockAudioEncoderFactory>();
+    ON_CALL(*factory.get(), GetSupportedEncoders())
+        .WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
+    ON_CALL(*factory.get(), QueryAudioEncoder(_))
+        .WillByDefault(Return(absl::nullopt));
+
+    EXPECT_CALL(*factory.get(), GetSupportedEncoders()).Times(AnyNumber());
+    EXPECT_CALL(*factory.get(), QueryAudioEncoder(_)).Times(AnyNumber());
+    EXPECT_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _)).Times(0);
+    return factory;
+  }
+
+  // Creates a MockAudioEncoderFactory with no formats that may be invoked to
+  // create a codec any number of times. It will, though, return nullptr on each
+  // call, since it supports no codecs.
+  static rtc::scoped_refptr<webrtc::MockAudioEncoderFactory>
+  CreateEmptyFactory() {
+    using ::testing::_;
+    using ::testing::AnyNumber;
+    using ::testing::Return;
+    using ::testing::SetArgPointee;
+
+    auto factory = rtc::make_ref_counted<webrtc::MockAudioEncoderFactory>();
+    ON_CALL(*factory.get(), GetSupportedEncoders())
+        .WillByDefault(Return(std::vector<webrtc::AudioCodecSpec>()));
+    ON_CALL(*factory.get(), QueryAudioEncoder(_))
+        .WillByDefault(Return(absl::nullopt));
+    ON_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _))
+        .WillByDefault(SetArgPointee<3>(nullptr));
+
+    EXPECT_CALL(*factory.get(), GetSupportedEncoders()).Times(AnyNumber());
+    EXPECT_CALL(*factory.get(), QueryAudioEncoder(_)).Times(AnyNumber());
+    EXPECT_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _))
+        .Times(AnyNumber());
+    return factory;
+  }
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_AUDIO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/test/mock_frame_transformer.h b/third_party/libwebrtc/test/mock_frame_transformer.h
new file mode 100644
index 0000000000..617cda8a43
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_frame_transformer.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_FRAME_TRANSFORMER_H_
+#define TEST_MOCK_FRAME_TRANSFORMER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/frame_transformer_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockFrameTransformer : public FrameTransformerInterface {  // gMock stub of FrameTransformerInterface.
+ public:
+  MOCK_METHOD(void,
+              Transform,
+              (std::unique_ptr<TransformableFrameInterface>),
+              (override));
+  MOCK_METHOD(void,
+              RegisterTransformedFrameCallback,
+              (rtc::scoped_refptr<TransformedFrameCallback>),
+              (override));
+  MOCK_METHOD(void,
+              RegisterTransformedFrameSinkCallback,
+              (rtc::scoped_refptr<TransformedFrameCallback>, uint32_t),
+              (override));
+  MOCK_METHOD(void, UnregisterTransformedFrameCallback, (), (override));
+  MOCK_METHOD(void,
+              UnregisterTransformedFrameSinkCallback,
+              (uint32_t),
+              (override));
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_FRAME_TRANSFORMER_H_
diff --git a/third_party/libwebrtc/test/mock_transformable_frame.h b/third_party/libwebrtc/test/mock_transformable_frame.h
new file mode 100644
index 0000000000..039013f218
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_transformable_frame.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_TRANSFORMABLE_FRAME_H_
+#define TEST_MOCK_TRANSFORMABLE_FRAME_H_
+
+#include "api/frame_transformer_interface.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockTransformableFrame : public TransformableFrameInterface {  // gMock stub of TransformableFrameInterface.
+ public:
+  MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const, override));
+  MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t>), (override));
+  MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override));
+  MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
+  MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override));
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_TRANSFORMABLE_FRAME_H_
diff --git a/third_party/libwebrtc/test/mock_transport.cc b/third_party/libwebrtc/test/mock_transport.cc
new file mode 100644
index 0000000000..3878b3bdbe
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_transport.cc
@@ -0,0 +1,18 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/mock_transport.h"
+
+namespace webrtc {
+
+MockTransport::MockTransport() = default;  // Defined out-of-line, presumably per gMock guidance (faster builds) -- see gMock cookbook.
+MockTransport::~MockTransport() = default;
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/mock_transport.h b/third_party/libwebrtc/test/mock_transport.h
new file mode 100644
index 0000000000..9c4dc4bf8d
--- /dev/null
+++ b/third_party/libwebrtc/test/mock_transport.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_MOCK_TRANSPORT_H_
+#define TEST_MOCK_TRANSPORT_H_
+
+#include "api/call/transport.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockTransport : public Transport {  // gMock implementation of the packet-sending Transport interface.
+ public:
+  MockTransport();
+  ~MockTransport();  // NOTE(review): not marked `override` -- confirm Transport's dtor is virtual.
+
+  MOCK_METHOD(bool,
+              SendRtp,
+              (const uint8_t*, size_t, const PacketOptions&),
+              (override));
+  MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t len), (override));
+};
+
+} // namespace webrtc
+
+#endif // TEST_MOCK_TRANSPORT_H_
diff --git a/third_party/libwebrtc/test/network/BUILD.gn b/third_party/libwebrtc/test/network/BUILD.gn
new file mode 100644
index 0000000000..20b17bc804
--- /dev/null
+++ b/third_party/libwebrtc/test/network/BUILD.gn
@@ -0,0 +1,199 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_library("emulated_network") {  # Implementation of the network-emulation framework.
+  visibility = [
+    ":*",
+    "../../api:create_network_emulation_manager",
+    "../../api/test/network_emulation:create_cross_traffic",
+  ]
+  if (rtc_include_tests) {
+    visibility += [
+      "../peer_scenario:*",
+      "../scenario:*",
+    ]
+  }
+  testonly = true  # Only linkable from test targets.
+  sources = [
+    "cross_traffic.cc",
+    "cross_traffic.h",
+    "emulated_network_manager.cc",
+    "emulated_network_manager.h",
+    "emulated_turn_server.cc",
+    "emulated_turn_server.h",
+    "fake_network_socket_server.cc",
+    "fake_network_socket_server.h",
+    "network_emulation.cc",
+    "network_emulation.h",
+    "network_emulation_manager.cc",
+    "network_emulation_manager.h",
+    "traffic_route.cc",
+    "traffic_route.h",
+  ]
+  deps = [
+    "../../api:array_view",
+    "../../api:field_trials_view",
+    "../../api:network_emulation_manager_api",
+    "../../api:packet_socket_factory",
+    "../../api:scoped_refptr",
+    "../../api:sequence_checker",
+    "../../api:simulated_network_api",
+    "../../api:time_controller",
+    "../../api/numerics",
+    "../../api/task_queue:pending_task_safety_flag",
+    "../../api/test/network_emulation",
+    "../../api/transport:stun_types",
+    "../../api/units:data_rate",
+    "../../api/units:data_size",
+    "../../api/units:time_delta",
+    "../../api/units:timestamp",
+    "../../call:simulated_network",
+    "../../p2p:p2p_server_utils",
+    "../../p2p:rtc_p2p",
+    "../../rtc_base:async_packet_socket",
+    "../../rtc_base:copy_on_write_buffer",
+    "../../rtc_base:ip_address",
+    "../../rtc_base:logging",
+    "../../rtc_base:macromagic",
+    "../../rtc_base:network",
+    "../../rtc_base:network_constants",
+    "../../rtc_base:random",
+    "../../rtc_base:rtc_base_tests_utils",
+    "../../rtc_base:rtc_event",
+    "../../rtc_base:rtc_task_queue",
+    "../../rtc_base:safe_minmax",
+    "../../rtc_base:socket",
+    "../../rtc_base:socket_address",
+    "../../rtc_base:socket_server",
+    "../../rtc_base:stringutils",
+    "../../rtc_base:task_queue_for_test",
+    "../../rtc_base:threading",
+    "../../rtc_base/memory:always_valid_pointer",
+    "../../rtc_base/synchronization:mutex",
+    "../../rtc_base/system:no_unique_address",
+    "../../rtc_base/task_utils:repeating_task",
+    "../../system_wrappers",
+    "../../test:scoped_key_value_config",
+    "../scenario:column_printer",
+    "../time_controller",
+  ]
+  absl_deps = [
+    "//third_party/abseil-cpp/absl/algorithm:container",
+    "//third_party/abseil-cpp/absl/memory",
+    "//third_party/abseil-cpp/absl/strings",
+    "//third_party/abseil-cpp/absl/types:optional",
+  ]
+}
+
+rtc_library("network_emulation_unittest") {  # Unit tests for :emulated_network.
+  testonly = true
+  sources = [ "network_emulation_unittest.cc" ]
+  deps = [
+    ":emulated_network",
+    "../:test_support",
+    "../../api:simulated_network_api",
+    "../../api/units:time_delta",
+    "../../call:simulated_network",
+    "../../rtc_base:gunit_helpers",
+    "../../rtc_base:logging",
+    "../../rtc_base:rtc_event",
+    "../../rtc_base:task_queue_for_test",
+    "../../rtc_base/synchronization:mutex",
+  ]
+}
+
+if (rtc_include_tests && !build_with_chromium) {
+  rtc_library("network_emulation_pc_unittest") {  # PeerConnection-level test of the emulated network.
+    testonly = true
+    sources = [ "network_emulation_pc_unittest.cc" ]
+    deps = [
+      ":emulated_network",
+      "../:test_support",
+      "../../api:callfactory_api",
+      "../../api:libjingle_peerconnection_api",
+      "../../api:scoped_refptr",
+      "../../api:simulated_network_api",
+      "../../api/rtc_event_log:rtc_event_log_factory",
+      "../../api/task_queue:default_task_queue_factory",
+      "../../api/transport:field_trial_based_config",
+      "../../call:simulated_network",
+      "../../media:rtc_audio_video",
+      "../../media:rtc_media_engine_defaults",
+      "../../modules/audio_device:audio_device_impl",
+      "../../p2p:rtc_p2p",
+      "../../pc:pc_test_utils",
+      "../../pc:peerconnection_wrapper",
+      "../../rtc_base:gunit_helpers",
+      "../../rtc_base:logging",
+      "../../rtc_base:rtc_event",
+      "../../rtc_base:task_queue_for_test",
+    ]
+  }
+}
+
+rtc_library("cross_traffic_unittest") {  # Unit tests for the cross-traffic generators.
+  testonly = true
+  sources = [ "cross_traffic_unittest.cc" ]
+  deps = [
+    ":emulated_network",
+    "../:test_support",
+    "../../api:network_emulation_manager_api",
+    "../../api:simulated_network_api",
+    "../../call:simulated_network",
+    "../../rtc_base:logging",
+    "../../rtc_base:network_constants",
+    "../../rtc_base:rtc_event",
+    "../time_controller",
+  ]
+  absl_deps = [
+    "//third_party/abseil-cpp/absl/memory",
+    "//third_party/abseil-cpp/absl/types:optional",
+  ]
+}
+
+if (rtc_include_tests) {
+  rtc_library("feedback_generator") {  # Generates transport feedback over an emulated link.
+    testonly = true
+    sources = [
+      "feedback_generator.cc",
+      "feedback_generator.h",
+    ]
+    deps = [
+      ":emulated_network",
+      "../../api/transport:network_control",
+      "../../api/transport:test_feedback_generator_interface",
+      "../../call:simulated_network",
+      "../../rtc_base:checks",
+      "../time_controller",
+    ]
+    absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+  }
+
+  rtc_library("feedback_generator_unittest") {
+    testonly = true
+    sources = [ "feedback_generator_unittest.cc" ]
+    deps = [
+      "../:test_support",
+      "../../api/transport:test_feedback_generator",
+    ]
+  }
+
+  if (!build_with_chromium) {
+    rtc_library("network_emulation_unittests") {  # Bundle of all network-emulation test targets.
+      testonly = true
+      deps = [
+        ":cross_traffic_unittest",
+        ":feedback_generator_unittest",
+        ":network_emulation_pc_unittest",
+        ":network_emulation_unittest",
+      ]
+    }
+  }
+}
diff --git a/third_party/libwebrtc/test/network/OWNERS b/third_party/libwebrtc/test/network/OWNERS
new file mode 100644
index 0000000000..b177c4eec5
--- /dev/null
+++ b/third_party/libwebrtc/test/network/OWNERS
@@ -0,0 +1 @@
+titovartem@webrtc.org
diff --git a/third_party/libwebrtc/test/network/cross_traffic.cc b/third_party/libwebrtc/test/network/cross_traffic.cc
new file mode 100644
index 0000000000..0a817a2d39
--- /dev/null
+++ b/third_party/libwebrtc/test/network/cross_traffic.cc
@@ -0,0 +1,322 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/cross_traffic.h"
+
+#include <math.h>
+
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "cross_traffic.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+namespace test {
+
+RandomWalkCrossTraffic::RandomWalkCrossTraffic(RandomWalkConfig config,
+ CrossTrafficRoute* traffic_route)
+ : config_(config),
+ traffic_route_(traffic_route),
+ random_(config_.random_seed) {
+ sequence_checker_.Detach();
+}
+RandomWalkCrossTraffic::~RandomWalkCrossTraffic() = default;
+
+void RandomWalkCrossTraffic::Process(Timestamp at_time) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (last_process_time_.IsMinusInfinity()) {
+ last_process_time_ = at_time;
+ }
+ TimeDelta delta = at_time - last_process_time_;
+ last_process_time_ = at_time;
+
+ if (at_time - last_update_time_ >= config_.update_interval) {
+ intensity_ += random_.Gaussian(config_.bias, config_.variance) *
+ sqrt((at_time - last_update_time_).seconds<double>());
+ intensity_ = rtc::SafeClamp(intensity_, 0.0, 1.0);
+ last_update_time_ = at_time;
+ }
+ pending_size_ += TrafficRate() * delta;
+
+ if (pending_size_ >= config_.min_packet_size &&
+ at_time >= last_send_time_ + config_.min_packet_interval) {
+ traffic_route_->SendPacket(pending_size_.bytes());
+ pending_size_ = DataSize::Zero();
+ last_send_time_ = at_time;
+ }
+}
+
+TimeDelta RandomWalkCrossTraffic::GetProcessInterval() const {
+ return config_.min_packet_interval;
+}
+
+DataRate RandomWalkCrossTraffic::TrafficRate() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return config_.peak_rate * intensity_;
+}
+
+ColumnPrinter RandomWalkCrossTraffic::StatsPrinter() {
+ return ColumnPrinter::Lambda(
+ "random_walk_cross_traffic_rate",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sb.AppendFormat("%.0lf", TrafficRate().bps() / 8.0);
+ },
+ 32);
+}
+
+PulsedPeaksCrossTraffic::PulsedPeaksCrossTraffic(
+ PulsedPeaksConfig config,
+ CrossTrafficRoute* traffic_route)
+ : config_(config), traffic_route_(traffic_route) {
+ sequence_checker_.Detach();
+}
+PulsedPeaksCrossTraffic::~PulsedPeaksCrossTraffic() = default;
+
+void PulsedPeaksCrossTraffic::Process(Timestamp at_time) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ TimeDelta time_since_toggle = at_time - last_update_time_;
+ if (time_since_toggle.IsInfinite() ||
+ (sending_ && time_since_toggle >= config_.send_duration)) {
+ sending_ = false;
+ last_update_time_ = at_time;
+ } else if (!sending_ && time_since_toggle >= config_.hold_duration) {
+ sending_ = true;
+ last_update_time_ = at_time;
+ // Start sending period.
+ last_send_time_ = at_time;
+ }
+
+ if (sending_) {
+ DataSize pending_size = config_.peak_rate * (at_time - last_send_time_);
+
+ if (pending_size >= config_.min_packet_size &&
+ at_time >= last_send_time_ + config_.min_packet_interval) {
+ traffic_route_->SendPacket(pending_size.bytes());
+ last_send_time_ = at_time;
+ }
+ }
+}
+
+TimeDelta PulsedPeaksCrossTraffic::GetProcessInterval() const {
+ return config_.min_packet_interval;
+}
+
+DataRate PulsedPeaksCrossTraffic::TrafficRate() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return sending_ ? config_.peak_rate : DataRate::Zero();
+}
+
+ColumnPrinter PulsedPeaksCrossTraffic::StatsPrinter() {
+ return ColumnPrinter::Lambda(
+ "pulsed_peaks_cross_traffic_rate",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sb.AppendFormat("%.0lf", TrafficRate().bps() / 8.0);
+ },
+ 32);
+}
+
+TcpMessageRouteImpl::TcpMessageRouteImpl(Clock* clock,
+ TaskQueueBase* task_queue,
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route)
+ : clock_(clock),
+ task_queue_(task_queue),
+ request_route_(send_route,
+ [this](TcpPacket packet, Timestamp) {
+ OnRequest(std::move(packet));
+ }),
+ response_route_(ret_route,
+ [this](TcpPacket packet, Timestamp arrival_time) {
+ OnResponse(std::move(packet), arrival_time);
+ }) {}
+
+void TcpMessageRouteImpl::SendMessage(size_t size,
+ std::function<void()> on_received) {
+ task_queue_->PostTask(
+ [this, size, handler = std::move(on_received)] {
+ // If we are currently sending a message we won't reset the connection,
+ // we'll act as if the messages are sent in the same TCP stream. This is
+ // intended to simulate recreation of a TCP session for each message
+ // in the typical case while avoiding the complexity overhead of
+ // maintaining multiple virtual TCP sessions in parallel.
+ if (pending_.empty() && in_flight_.empty()) {
+ cwnd_ = 10;
+ ssthresh_ = INFINITY;
+ }
+ int64_t data_left = static_cast<int64_t>(size);
+ int64_t kMaxPacketSize = 1200;
+ int64_t kMinPacketSize = 4;
+ Message message{std::move(handler)};
+ while (data_left > 0) {
+ int64_t packet_size = std::min(data_left, kMaxPacketSize);
+ int fragment_id = next_fragment_id_++;
+ pending_.push_back(MessageFragment{
+ fragment_id,
+ static_cast<size_t>(std::max(kMinPacketSize, packet_size))});
+ message.pending_fragment_ids.insert(fragment_id);
+ data_left -= packet_size;
+ }
+ messages_.emplace_back(message);
+ SendPackets(clock_->CurrentTime());
+ });
+}
+
+void TcpMessageRouteImpl::OnRequest(TcpPacket packet_info) {
+ for (auto it = messages_.begin(); it != messages_.end(); ++it) {
+ if (it->pending_fragment_ids.count(packet_info.fragment.fragment_id) != 0) {
+ it->pending_fragment_ids.erase(packet_info.fragment.fragment_id);
+ if (it->pending_fragment_ids.empty()) {
+ it->handler();
+ messages_.erase(it);
+ }
+ break;
+ }
+ }
+ const size_t kAckPacketSize = 20;
+ response_route_.SendPacket(kAckPacketSize, packet_info);
+}
+
+void TcpMessageRouteImpl::OnResponse(TcpPacket packet_info, Timestamp at_time) {
+ auto it = in_flight_.find(packet_info.sequence_number);
+ if (it != in_flight_.end()) {
+ last_rtt_ = at_time - packet_info.send_time;
+ in_flight_.erase(it);
+ }
+ auto lost_end = in_flight_.lower_bound(packet_info.sequence_number);
+ for (auto lost_it = in_flight_.begin(); lost_it != lost_end;
+ lost_it = in_flight_.erase(lost_it)) {
+ pending_.push_front(lost_it->second.fragment);
+ }
+
+ if (packet_info.sequence_number - last_acked_seq_num_ > 1) {
+ HandleLoss(at_time);
+ } else if (cwnd_ <= ssthresh_) {
+ cwnd_ += 1;
+ } else {
+ cwnd_ += 1.0f / cwnd_;
+ }
+ last_acked_seq_num_ =
+ std::max(packet_info.sequence_number, last_acked_seq_num_);
+ SendPackets(at_time);
+}
+
+void TcpMessageRouteImpl::HandleLoss(Timestamp at_time) {
+ if (at_time - last_reduction_time_ < last_rtt_)
+ return;
+ last_reduction_time_ = at_time;
+ ssthresh_ = std::max(static_cast<int>(in_flight_.size() / 2), 2);
+ cwnd_ = ssthresh_;
+}
+
+void TcpMessageRouteImpl::SendPackets(Timestamp at_time) {
+ const TimeDelta kPacketTimeout = TimeDelta::Seconds(1);
+ int cwnd = std::ceil(cwnd_);
+ int packets_to_send = std::max(cwnd - static_cast<int>(in_flight_.size()), 0);
+ while (packets_to_send-- > 0 && !pending_.empty()) {
+ auto seq_num = next_sequence_number_++;
+ TcpPacket send;
+ send.sequence_number = seq_num;
+ send.send_time = at_time;
+ send.fragment = pending_.front();
+ pending_.pop_front();
+ request_route_.SendPacket(send.fragment.size, send);
+ in_flight_.insert({seq_num, send});
+ task_queue_->PostDelayedTask(
+ [this, seq_num] {
+ HandlePacketTimeout(seq_num, clock_->CurrentTime());
+ },
+ kPacketTimeout);
+ }
+}
+
+void TcpMessageRouteImpl::HandlePacketTimeout(int seq_num, Timestamp at_time) {
+ auto lost = in_flight_.find(seq_num);
+ if (lost != in_flight_.end()) {
+ pending_.push_front(lost->second.fragment);
+ in_flight_.erase(lost);
+ HandleLoss(at_time);
+ SendPackets(at_time);
+ }
+}
+
+FakeTcpCrossTraffic::FakeTcpCrossTraffic(FakeTcpConfig config,
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route)
+ : conf_(config), route_(this, send_route, ret_route) {}
+
+TimeDelta FakeTcpCrossTraffic::GetProcessInterval() const {
+ return conf_.process_interval;
+}
+
+void FakeTcpCrossTraffic::Process(Timestamp at_time) {
+ SendPackets(at_time);
+}
+
+void FakeTcpCrossTraffic::OnRequest(int sequence_number, Timestamp at_time) {
+ const size_t kAckPacketSize = 20;
+ route_.SendResponse(kAckPacketSize, sequence_number);
+}
+
+void FakeTcpCrossTraffic::OnResponse(int sequence_number, Timestamp at_time) {
+ ack_received_ = true;
+ auto it = in_flight_.find(sequence_number);
+ if (it != in_flight_.end()) {
+ last_rtt_ = at_time - in_flight_.at(sequence_number);
+ in_flight_.erase(sequence_number);
+ }
+ if (sequence_number - last_acked_seq_num_ > 1) {
+ HandleLoss(at_time);
+ } else if (cwnd_ <= ssthresh_) {
+ cwnd_ += 1;
+ } else {
+ cwnd_ += 1.0f / cwnd_;
+ }
+ last_acked_seq_num_ = std::max(sequence_number, last_acked_seq_num_);
+ SendPackets(at_time);
+}
+
+void FakeTcpCrossTraffic::HandleLoss(Timestamp at_time) {
+ if (at_time - last_reduction_time_ < last_rtt_)
+ return;
+ last_reduction_time_ = at_time;
+ ssthresh_ = std::max(static_cast<int>(in_flight_.size() / 2), 2);
+ cwnd_ = ssthresh_;
+}
+
+void FakeTcpCrossTraffic::SendPackets(Timestamp at_time) {
+ int cwnd = std::ceil(cwnd_);
+ int packets_to_send = std::max(cwnd - static_cast<int>(in_flight_.size()), 0);
+ bool timeouts = false;
+ for (auto it = in_flight_.begin(); it != in_flight_.end();) {
+ if (it->second < at_time - conf_.packet_timeout) {
+ it = in_flight_.erase(it);
+ timeouts = true;
+ } else {
+ ++it;
+ }
+ }
+ if (timeouts)
+ HandleLoss(at_time);
+ for (int i = 0; i < packets_to_send; ++i) {
+ if ((total_sent_ + conf_.packet_size) > conf_.send_limit) {
+ break;
+ }
+ in_flight_.insert({next_sequence_number_, at_time});
+ route_.SendRequest(conf_.packet_size.bytes<size_t>(),
+ next_sequence_number_++);
+ total_sent_ += conf_.packet_size;
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/cross_traffic.h b/third_party/libwebrtc/test/network/cross_traffic.h
new file mode 100644
index 0000000000..d21e942475
--- /dev/null
+++ b/third_party/libwebrtc/test/network/cross_traffic.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_CROSS_TRAFFIC_H_
+#define TEST_NETWORK_CROSS_TRAFFIC_H_
+
+#include <algorithm>
+#include <map>
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/random.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/column_printer.h"
+
+namespace webrtc {
+namespace test {
+
+class RandomWalkCrossTraffic final : public CrossTrafficGenerator {
+ public:
+ RandomWalkCrossTraffic(RandomWalkConfig config,
+ CrossTrafficRoute* traffic_route);
+ ~RandomWalkCrossTraffic();
+
+ void Process(Timestamp at_time) override;
+ TimeDelta GetProcessInterval() const override;
+ DataRate TrafficRate() const;
+ ColumnPrinter StatsPrinter();
+
+ private:
+ SequenceChecker sequence_checker_;
+ const RandomWalkConfig config_;
+ CrossTrafficRoute* const traffic_route_ RTC_PT_GUARDED_BY(sequence_checker_);
+ webrtc::Random random_ RTC_GUARDED_BY(sequence_checker_);
+
+ Timestamp last_process_time_ RTC_GUARDED_BY(sequence_checker_) =
+ Timestamp::MinusInfinity();
+ Timestamp last_update_time_ RTC_GUARDED_BY(sequence_checker_) =
+ Timestamp::MinusInfinity();
+ Timestamp last_send_time_ RTC_GUARDED_BY(sequence_checker_) =
+ Timestamp::MinusInfinity();
+ double intensity_ RTC_GUARDED_BY(sequence_checker_) = 0;
+ DataSize pending_size_ RTC_GUARDED_BY(sequence_checker_) = DataSize::Zero();
+};
+
+class PulsedPeaksCrossTraffic final : public CrossTrafficGenerator {
+ public:
+ PulsedPeaksCrossTraffic(PulsedPeaksConfig config,
+ CrossTrafficRoute* traffic_route);
+ ~PulsedPeaksCrossTraffic();
+
+ void Process(Timestamp at_time) override;
+ TimeDelta GetProcessInterval() const override;
+ DataRate TrafficRate() const;
+ ColumnPrinter StatsPrinter();
+
+ private:
+ SequenceChecker sequence_checker_;
+ const PulsedPeaksConfig config_;
+ CrossTrafficRoute* const traffic_route_ RTC_PT_GUARDED_BY(sequence_checker_);
+
+ Timestamp last_update_time_ RTC_GUARDED_BY(sequence_checker_) =
+ Timestamp::MinusInfinity();
+ Timestamp last_send_time_ RTC_GUARDED_BY(sequence_checker_) =
+ Timestamp::MinusInfinity();
+ bool sending_ RTC_GUARDED_BY(sequence_checker_) = false;
+};
+
+class TcpMessageRouteImpl final : public TcpMessageRoute {
+ public:
+ TcpMessageRouteImpl(Clock* clock,
+ TaskQueueBase* task_queue,
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route);
+
+ // Sends a TCP message of the given `size` over the route, `on_received` is
+ // called when the message has been delivered. Note that the connection
+ // parameters are reset iff there's no currently pending message on the route.
+ void SendMessage(size_t size, std::function<void()> on_received) override;
+
+ private:
+  // Represents a message sent over the route. When all fragments have been
+ // delivered, the message is considered delivered and the handler is
+  // triggered. This happens only once.
+ struct Message {
+ std::function<void()> handler;
+ std::set<int> pending_fragment_ids;
+ };
+  // Represents a piece of a message that fits into a TCP packet.
+ struct MessageFragment {
+ int fragment_id;
+ size_t size;
+ };
+ // Represents a packet sent on the wire.
+ struct TcpPacket {
+ int sequence_number;
+ Timestamp send_time = Timestamp::MinusInfinity();
+ MessageFragment fragment;
+ };
+
+ void OnRequest(TcpPacket packet_info);
+ void OnResponse(TcpPacket packet_info, Timestamp at_time);
+ void HandleLoss(Timestamp at_time);
+ void SendPackets(Timestamp at_time);
+ void HandlePacketTimeout(int seq_num, Timestamp at_time);
+
+ Clock* const clock_;
+ TaskQueueBase* const task_queue_;
+ FakePacketRoute<TcpPacket> request_route_;
+ FakePacketRoute<TcpPacket> response_route_;
+
+ std::deque<MessageFragment> pending_;
+ std::map<int, TcpPacket> in_flight_;
+ std::list<Message> messages_;
+
+ double cwnd_;
+ double ssthresh_;
+
+ int last_acked_seq_num_ = 0;
+ int next_sequence_number_ = 0;
+ int next_fragment_id_ = 0;
+ Timestamp last_reduction_time_ = Timestamp::MinusInfinity();
+ TimeDelta last_rtt_ = TimeDelta::Zero();
+};
+
+class FakeTcpCrossTraffic
+ : public TwoWayFakeTrafficRoute<int, int>::TrafficHandlerInterface,
+ public CrossTrafficGenerator {
+ public:
+ FakeTcpCrossTraffic(FakeTcpConfig config,
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route);
+
+ TimeDelta GetProcessInterval() const override;
+ void Process(Timestamp at_time) override;
+
+ void OnRequest(int sequence_number, Timestamp at_time) override;
+ void OnResponse(int sequence_number, Timestamp at_time) override;
+
+ void HandleLoss(Timestamp at_time);
+
+ void SendPackets(Timestamp at_time);
+
+ private:
+ const FakeTcpConfig conf_;
+ TwoWayFakeTrafficRoute<int, int> route_;
+
+ std::map<int, Timestamp> in_flight_;
+ double cwnd_ = 10;
+ double ssthresh_ = INFINITY;
+ bool ack_received_ = false;
+ int last_acked_seq_num_ = 0;
+ int next_sequence_number_ = 0;
+ Timestamp last_reduction_time_ = Timestamp::MinusInfinity();
+ TimeDelta last_rtt_ = TimeDelta::Zero();
+ DataSize total_sent_ = DataSize::Zero();
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_CROSS_TRAFFIC_H_
diff --git a/third_party/libwebrtc/test/network/cross_traffic_unittest.cc b/third_party/libwebrtc/test/network/cross_traffic_unittest.cc
new file mode 100644
index 0000000000..36aff67bb2
--- /dev/null
+++ b/third_party/libwebrtc/test/network/cross_traffic_unittest.cc
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/cross_traffic.h"
+
+#include <atomic>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/types/optional.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/simulated_network.h"
+#include "call/simulated_network.h"
+#include "rtc_base/event.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network_constants.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/network/traffic_route.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr uint32_t kTestIpAddress = 0xC0A80011; // 192.168.0.17
+
+class CountingReceiver : public EmulatedNetworkReceiverInterface {
+ public:
+ void OnPacketReceived(EmulatedIpPacket packet) override {
+ packets_count_++;
+ total_packets_size_ += packet.size();
+ }
+
+ std::atomic<int> packets_count_{0};
+ std::atomic<uint64_t> total_packets_size_{0};
+};
+struct TrafficCounterFixture {
+ SimulatedClock clock{0};
+ CountingReceiver counter;
+ TaskQueueForTest task_queue_;
+ EmulatedEndpointImpl endpoint{EmulatedEndpointImpl::Options{
+ /*id=*/1,
+ rtc::IPAddress(kTestIpAddress),
+ EmulatedEndpointConfig(),
+ EmulatedNetworkStatsGatheringMode::kDefault,
+ },
+ /*is_enabled=*/true, &task_queue_, &clock};
+};
+
+} // namespace
+
+TEST(CrossTrafficTest, TriggerPacketBurst) {
+ TrafficCounterFixture fixture;
+ CrossTrafficRouteImpl traffic(&fixture.clock, &fixture.counter,
+ &fixture.endpoint);
+ traffic.TriggerPacketBurst(100, 1000);
+
+ EXPECT_EQ(fixture.counter.packets_count_, 100);
+ EXPECT_EQ(fixture.counter.total_packets_size_, 100 * 1000ul);
+}
+
+TEST(CrossTrafficTest, PulsedPeaksCrossTraffic) {
+ TrafficCounterFixture fixture;
+ CrossTrafficRouteImpl traffic(&fixture.clock, &fixture.counter,
+ &fixture.endpoint);
+
+ PulsedPeaksConfig config;
+ config.peak_rate = DataRate::KilobitsPerSec(1000);
+ config.min_packet_size = DataSize::Bytes(1);
+ config.min_packet_interval = TimeDelta::Millis(25);
+ config.send_duration = TimeDelta::Millis(500);
+ config.hold_duration = TimeDelta::Millis(250);
+ PulsedPeaksCrossTraffic pulsed_peaks(config, &traffic);
+ const auto kRunTime = TimeDelta::Seconds(1);
+ while (fixture.clock.TimeInMilliseconds() < kRunTime.ms()) {
+ pulsed_peaks.Process(Timestamp::Millis(fixture.clock.TimeInMilliseconds()));
+ fixture.clock.AdvanceTimeMilliseconds(1);
+ }
+
+ RTC_LOG(LS_INFO) << fixture.counter.packets_count_ << " packets; "
+ << fixture.counter.total_packets_size_ << " bytes";
+ // Using 50% duty cycle.
+ const auto kExpectedDataSent = kRunTime * config.peak_rate * 0.5;
+ EXPECT_NEAR(fixture.counter.total_packets_size_, kExpectedDataSent.bytes(),
+ kExpectedDataSent.bytes() * 0.1);
+}
+
+TEST(CrossTrafficTest, RandomWalkCrossTraffic) {
+ TrafficCounterFixture fixture;
+ CrossTrafficRouteImpl traffic(&fixture.clock, &fixture.counter,
+ &fixture.endpoint);
+
+ RandomWalkConfig config;
+ config.peak_rate = DataRate::KilobitsPerSec(1000);
+ config.min_packet_size = DataSize::Bytes(1);
+ config.min_packet_interval = TimeDelta::Millis(25);
+ config.update_interval = TimeDelta::Millis(500);
+ config.variance = 0.0;
+ config.bias = 1.0;
+
+ RandomWalkCrossTraffic random_walk(config, &traffic);
+ const auto kRunTime = TimeDelta::Seconds(1);
+ while (fixture.clock.TimeInMilliseconds() < kRunTime.ms()) {
+ random_walk.Process(Timestamp::Millis(fixture.clock.TimeInMilliseconds()));
+ fixture.clock.AdvanceTimeMilliseconds(1);
+ }
+
+ RTC_LOG(LS_INFO) << fixture.counter.packets_count_ << " packets; "
+ << fixture.counter.total_packets_size_ << " bytes";
+ // Sending at peak rate since bias = 1.
+ const auto kExpectedDataSent = kRunTime * config.peak_rate;
+ EXPECT_NEAR(fixture.counter.total_packets_size_, kExpectedDataSent.bytes(),
+ kExpectedDataSent.bytes() * 0.1);
+}
+
+TEST(TcpMessageRouteTest, DeliveredOnLossyNetwork) {
+ NetworkEmulationManagerImpl net(TimeMode::kSimulated,
+ EmulatedNetworkStatsGatheringMode::kDefault);
+ BuiltInNetworkBehaviorConfig send;
+ // 800 kbps means that the 100 kB message would be delivered in ca 1 second
+ // under ideal conditions and no overhead.
+ send.link_capacity_kbps = 100 * 8;
+ send.loss_percent = 50;
+ send.queue_delay_ms = 100;
+ send.delay_standard_deviation_ms = 20;
+ send.allow_reordering = true;
+ auto ret = send;
+ ret.loss_percent = 10;
+
+ auto* tcp_route =
+ net.CreateTcpRoute(net.CreateRoute({net.CreateEmulatedNode(send)}),
+ net.CreateRoute({net.CreateEmulatedNode(ret)}));
+ int deliver_count = 0;
+ // 100 kB is more than what fits into a single packet.
+ constexpr size_t kMessageSize = 100000;
+
+ tcp_route->SendMessage(kMessageSize, [&] {
+ RTC_LOG(LS_INFO) << "Received at " << ToString(net.Now());
+ deliver_count++;
+ });
+
+ // If there was no loss, we would have delivered the message in ca 1 second,
+ // with 50% it should take much longer.
+ net.time_controller()->AdvanceTime(TimeDelta::Seconds(5));
+ ASSERT_EQ(deliver_count, 0);
+  // But given enough time the message will be delivered, but only once.
+ net.time_controller()->AdvanceTime(TimeDelta::Seconds(60));
+ EXPECT_EQ(deliver_count, 1);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/emulated_network_manager.cc b/third_party/libwebrtc/test/network/emulated_network_manager.cc
new file mode 100644
index 0000000000..fa4037e5db
--- /dev/null
+++ b/third_party/libwebrtc/test/network/emulated_network_manager.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/emulated_network_manager.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "test/network/fake_network_socket_server.h"
+
+namespace webrtc {
+namespace test {
+
+EmulatedNetworkManager::EmulatedNetworkManager(
+ TimeController* time_controller,
+ TaskQueueForTest* task_queue,
+ EndpointsContainer* endpoints_container)
+ : task_queue_(task_queue),
+ endpoints_container_(endpoints_container),
+ sent_first_update_(false),
+ start_count_(0) {
+ auto socket_server =
+ std::make_unique<FakeNetworkSocketServer>(endpoints_container);
+ packet_socket_factory_ =
+ std::make_unique<rtc::BasicPacketSocketFactory>(socket_server.get());
+ // Since we pass ownership of the socket server to `network_thread_`, we must
+ // arrange that it outlives `packet_socket_factory_` which refers to it.
+ network_thread_ =
+ time_controller->CreateThread("net_thread", std::move(socket_server));
+}
+
+void EmulatedNetworkManager::EnableEndpoint(EmulatedEndpointImpl* endpoint) {
+ RTC_CHECK(endpoints_container_->HasEndpoint(endpoint))
+ << "No such interface: " << endpoint->GetPeerLocalAddress().ToString();
+ network_thread_->PostTask([this, endpoint]() {
+ endpoint->Enable();
+ UpdateNetworksOnce();
+ });
+}
+
+void EmulatedNetworkManager::DisableEndpoint(EmulatedEndpointImpl* endpoint) {
+ RTC_CHECK(endpoints_container_->HasEndpoint(endpoint))
+ << "No such interface: " << endpoint->GetPeerLocalAddress().ToString();
+ network_thread_->PostTask([this, endpoint]() {
+ endpoint->Disable();
+ UpdateNetworksOnce();
+ });
+}
+
+// Network manager interface. All these methods are supposed to be called from
+// the same thread.
+void EmulatedNetworkManager::StartUpdating() {
+ RTC_DCHECK_RUN_ON(network_thread_.get());
+
+ if (start_count_) {
+ // If network interfaces are already discovered and signal is sent,
+ // we should trigger network signal immediately for the new clients
+ // to start allocating ports.
+ if (sent_first_update_)
+ network_thread_->PostTask([this]() { MaybeSignalNetworksChanged(); });
+ } else {
+ network_thread_->PostTask([this]() { UpdateNetworksOnce(); });
+ }
+ ++start_count_;
+}
+
+void EmulatedNetworkManager::StopUpdating() {
+ RTC_DCHECK_RUN_ON(network_thread_.get());
+ if (!start_count_)
+ return;
+
+ --start_count_;
+ if (!start_count_) {
+ sent_first_update_ = false;
+ }
+}
+
+void EmulatedNetworkManager::GetStats(
+ std::function<void(EmulatedNetworkStats)> stats_callback) const {
+ task_queue_->PostTask([stats_callback, this]() {
+ stats_callback(endpoints_container_->GetStats());
+ });
+}
+
+void EmulatedNetworkManager::UpdateNetworksOnce() {
+ RTC_DCHECK_RUN_ON(network_thread_.get());
+
+ std::vector<std::unique_ptr<rtc::Network>> networks;
+ for (std::unique_ptr<rtc::Network>& net :
+ endpoints_container_->GetEnabledNetworks()) {
+ net->set_default_local_address_provider(this);
+ networks.push_back(std::move(net));
+ }
+
+ bool changed;
+ MergeNetworkList(std::move(networks), &changed);
+ if (changed || !sent_first_update_) {
+ MaybeSignalNetworksChanged();
+ sent_first_update_ = true;
+ }
+}
+
+void EmulatedNetworkManager::MaybeSignalNetworksChanged() {
+ RTC_DCHECK_RUN_ON(network_thread_.get());
+ // If manager is stopped we don't need to signal anything.
+ if (start_count_ == 0) {
+ return;
+ }
+ SignalNetworksChanged();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/emulated_network_manager.h b/third_party/libwebrtc/test/network/emulated_network_manager.h
new file mode 100644
index 0000000000..fb4ee1ee85
--- /dev/null
+++ b/third_party/libwebrtc/test/network/emulated_network_manager.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_EMULATED_NETWORK_MANAGER_H_
+#define TEST_NETWORK_EMULATED_NETWORK_MANAGER_H_
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "api/sequence_checker.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/time_controller.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/network.h"
+#include "rtc_base/socket_server.h"
+#include "rtc_base/thread.h"
+#include "test/network/network_emulation.h"
+
+namespace webrtc {
+namespace test {
+
+// Framework assumes that rtc::NetworkManager is called from network thread.
+class EmulatedNetworkManager : public rtc::NetworkManagerBase,
+ public sigslot::has_slots<>,
+ public EmulatedNetworkManagerInterface {
+ public:
+ EmulatedNetworkManager(TimeController* time_controller,
+ TaskQueueForTest* task_queue,
+ EndpointsContainer* endpoints_container);
+
+ void EnableEndpoint(EmulatedEndpointImpl* endpoint);
+ void DisableEndpoint(EmulatedEndpointImpl* endpoint);
+
+ // NetworkManager interface. All these methods are supposed to be called from
+ // the same thread.
+ void StartUpdating() override;
+ void StopUpdating() override;
+
+ // We don't support any address interfaces in the network emulation framework.
+ std::vector<const rtc::Network*> GetAnyAddressNetworks() override {
+ return {};
+ }
+
+ // EmulatedNetworkManagerInterface API
+ rtc::Thread* network_thread() override { return network_thread_.get(); }
+ rtc::NetworkManager* network_manager() override { return this; }
+ rtc::PacketSocketFactory* packet_socket_factory() override {
+ return packet_socket_factory_.get();
+ }
+ std::vector<EmulatedEndpoint*> endpoints() const override {
+ return endpoints_container_->GetEndpoints();
+ }
+ void GetStats(
+ std::function<void(EmulatedNetworkStats)> stats_callback) const override;
+
+ private:
+ void UpdateNetworksOnce();
+ void MaybeSignalNetworksChanged();
+
+ TaskQueueForTest* const task_queue_;
+ const EndpointsContainer* const endpoints_container_;
+ // The `network_thread_` must outlive `packet_socket_factory_`, because they
+ // both refer to a socket server that is owned by `network_thread_`. Both
+ // pointers are assigned only in the constructor, but the way they are
+ // initialized unfortunately doesn't work with const std::unique_ptr<...>.
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::PacketSocketFactory> packet_socket_factory_;
+ bool sent_first_update_ RTC_GUARDED_BY(network_thread_);
+ int start_count_ RTC_GUARDED_BY(network_thread_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_EMULATED_NETWORK_MANAGER_H_
diff --git a/third_party/libwebrtc/test/network/emulated_turn_server.cc b/third_party/libwebrtc/test/network/emulated_turn_server.cc
new file mode 100644
index 0000000000..0bc7ec6e2a
--- /dev/null
+++ b/third_party/libwebrtc/test/network/emulated_turn_server.cc
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/emulated_turn_server.h"
+
+#include <string>
+#include <utility>
+
+#include "api/packet_socket_factory.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/task_queue_for_test.h"
+
+namespace {
+
+static const char kTestRealm[] = "example.org";
+static const char kTestSoftware[] = "TestTurnServer";
+
+// A wrapper class for copying data between an AsyncPacketSocket and a
+// EmulatedEndpoint. This is used by the cricket::TurnServer when
+// sending data back into the emulated network.
+class AsyncPacketSocketWrapper : public rtc::AsyncPacketSocket {
+ public:
+  AsyncPacketSocketWrapper(webrtc::test::EmulatedTURNServer* turn_server,
+                           webrtc::EmulatedEndpoint* endpoint,
+                           uint16_t port)
+      : turn_server_(turn_server),
+        endpoint_(endpoint),
+        local_address_(
+            rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), port)) {}
+  ~AsyncPacketSocketWrapper() override { turn_server_->Unbind(local_address_); }
+
+  rtc::SocketAddress GetLocalAddress() const override { return local_address_; }
+  rtc::SocketAddress GetRemoteAddress() const override {
+    return rtc::SocketAddress();
+  }
+  int Send(const void* pv,
+           size_t cb,
+           const rtc::PacketOptions& options) override {
+    RTC_CHECK(false) << "TCP not implemented";
+    return -1;
+  }
+  int SendTo(const void* pv,
+             size_t cb,
+             const rtc::SocketAddress& addr,
+             const rtc::PacketOptions& options) override {
+    // Copy from rtc::AsyncPacketSocket to EmulatedEndpoint.
+    rtc::CopyOnWriteBuffer buf(reinterpret_cast<const char*>(pv), cb);
+    endpoint_->SendPacket(local_address_, addr, buf);
+    return cb;
+  }
+  int Close() override { return 0; }
+
+  rtc::AsyncPacketSocket::State GetState() const override {
+    return rtc::AsyncPacketSocket::STATE_BOUND;
+  }
+  int GetOption(rtc::Socket::Option opt, int* value) override { return 0; }
+  int SetOption(rtc::Socket::Option opt, int value) override { return 0; }
+  int GetError() const override { return 0; }
+  void SetError(int error) override {}
+
+ private:
+  webrtc::test::EmulatedTURNServer* const turn_server_;
+  webrtc::EmulatedEndpoint* const endpoint_;
+  const rtc::SocketAddress local_address_;
+};
+
+// A wrapper class for cricket::TurnServer to allocate sockets.
+class PacketSocketFactoryWrapper : public rtc::PacketSocketFactory {
+ public:
+ explicit PacketSocketFactoryWrapper(
+ webrtc::test::EmulatedTURNServer* turn_server)
+ : turn_server_(turn_server) {}
+ ~PacketSocketFactoryWrapper() override {}
+
+ // This method is called from TurnServer when making a TURN ALLOCATION.
+ // It will create a socket on the `peer_` endpoint.
+ rtc::AsyncPacketSocket* CreateUdpSocket(const rtc::SocketAddress& address,
+ uint16_t min_port,
+ uint16_t max_port) override {
+ return turn_server_->CreatePeerSocket();
+ }
+
+ rtc::AsyncListenSocket* CreateServerTcpSocket(
+ const rtc::SocketAddress& local_address,
+ uint16_t min_port,
+ uint16_t max_port,
+ int opts) override {
+ return nullptr;
+ }
+ rtc::AsyncPacketSocket* CreateClientTcpSocket(
+ const rtc::SocketAddress& local_address,
+ const rtc::SocketAddress& remote_address,
+ const rtc::ProxyInfo& proxy_info,
+ const std::string& user_agent,
+ const rtc::PacketSocketTcpOptions& tcp_options) override {
+ return nullptr;
+ }
+ std::unique_ptr<webrtc::AsyncDnsResolverInterface> CreateAsyncDnsResolver()
+ override {
+ return nullptr;
+ }
+
+ private:
+ webrtc::test::EmulatedTURNServer* turn_server_;
+};
+
+} // namespace
+
+namespace webrtc {
+namespace test {
+
+EmulatedTURNServer::EmulatedTURNServer(std::unique_ptr<rtc::Thread> thread,
+                                       EmulatedEndpoint* client,
+                                       EmulatedEndpoint* peer)
+    : thread_(std::move(thread)), client_(client), peer_(peer) {
+  ice_config_.username = "keso";
+  ice_config_.password = "keso";
+  SendTask(thread_.get(), [=]() {
+    RTC_DCHECK_RUN_ON(thread_.get());
+    turn_server_ = std::make_unique<cricket::TurnServer>(thread_.get());
+    turn_server_->set_realm(kTestRealm);
+    turn_server_->set_software(kTestSoftware);
+    turn_server_->set_auth_hook(this);
+
+    auto client_socket = Wrap(client_);
+    turn_server_->AddInternalSocket(client_socket, cricket::PROTO_UDP);
+    turn_server_->SetExternalSocketFactory(new PacketSocketFactoryWrapper(this),
+                                           rtc::SocketAddress());
+    client_address_ = client_socket->GetLocalAddress();
+    char buf[256];
+    rtc::SimpleStringBuilder str(buf);
+    str.AppendFormat("turn:%s?transport=udp",
+                     client_address_.ToString().c_str());
+    ice_config_.url = str.str();
+  });
+}
+
+void EmulatedTURNServer::Stop() {
+ SendTask(thread_.get(), [=]() {
+ RTC_DCHECK_RUN_ON(thread_.get());
+ sockets_.clear();
+ });
+}
+
+EmulatedTURNServer::~EmulatedTURNServer() {
+ SendTask(thread_.get(), [=]() {
+ RTC_DCHECK_RUN_ON(thread_.get());
+ turn_server_.reset(nullptr);
+ });
+}
+
+rtc::AsyncPacketSocket* EmulatedTURNServer::Wrap(EmulatedEndpoint* endpoint) {
+ RTC_DCHECK_RUN_ON(thread_.get());
+ auto port = endpoint->BindReceiver(0, this).value();
+ auto socket = new AsyncPacketSocketWrapper(this, endpoint, port);
+ sockets_[rtc::SocketAddress(endpoint->GetPeerLocalAddress(), port)] = socket;
+ return socket;
+}
+
+void EmulatedTURNServer::OnPacketReceived(webrtc::EmulatedIpPacket packet) {
+ // Copy from EmulatedEndpoint to rtc::AsyncPacketSocket.
+ thread_->PostTask([this, packet(std::move(packet))]() {
+ RTC_DCHECK_RUN_ON(thread_.get());
+ auto it = sockets_.find(packet.to);
+ if (it != sockets_.end()) {
+ it->second->SignalReadPacket(
+ it->second, reinterpret_cast<const char*>(packet.cdata()),
+ packet.size(), packet.from, packet.arrival_time.ms());
+ }
+ });
+}
+
+void EmulatedTURNServer::Unbind(rtc::SocketAddress address) {
+ RTC_DCHECK_RUN_ON(thread_.get());
+ if (GetClientEndpoint()->GetPeerLocalAddress() == address.ipaddr()) {
+ GetClientEndpoint()->UnbindReceiver(address.port());
+ } else {
+ GetPeerEndpoint()->UnbindReceiver(address.port());
+ }
+ sockets_.erase(address);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/emulated_turn_server.h b/third_party/libwebrtc/test/network/emulated_turn_server.h
new file mode 100644
index 0000000000..9cb0ceabf6
--- /dev/null
+++ b/third_party/libwebrtc/test/network/emulated_turn_server.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_EMULATED_TURN_SERVER_H_
+#define TEST_NETWORK_EMULATED_TURN_SERVER_H_
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/transport/stun.h"
+#include "p2p/base/turn_server.h"
+#include "rtc_base/async_packet_socket.h"
+
+namespace webrtc {
+namespace test {
+
+// EmulatedTURNServer wraps cricket::TurnServer to be used inside
+// a emulated network.
+//
+// Packets from EmulatedEndpoint (client or peer) are received in
+// EmulatedTURNServer::OnPacketReceived which performs a map lookup
+// and delivers them into cricket::TurnServer using
+// AsyncPacketSocket::SignalReadPacket
+//
+// Packets from cricket::TurnServer to EmulatedEndpoint are sent into
+// using a wrapper around AsyncPacketSocket (no lookup required as the
+// wrapper around AsyncPacketSocket keep a pointer to the EmulatedEndpoint).
+class EmulatedTURNServer : public EmulatedTURNServerInterface,
+ public cricket::TurnAuthInterface,
+ public webrtc::EmulatedNetworkReceiverInterface {
+ public:
+ // Create an EmulatedTURNServer.
+ // `thread` is a thread that will be used to run cricket::TurnServer
+ // that expects all calls to be made from a single thread.
+ EmulatedTURNServer(std::unique_ptr<rtc::Thread> thread,
+ EmulatedEndpoint* client,
+ EmulatedEndpoint* peer);
+ ~EmulatedTURNServer() override;
+
+ IceServerConfig GetIceServerConfig() const override { return ice_config_; }
+
+ EmulatedEndpoint* GetClientEndpoint() const override { return client_; }
+
+ rtc::SocketAddress GetClientEndpointAddress() const override {
+ return client_address_;
+ }
+
+ EmulatedEndpoint* GetPeerEndpoint() const override { return peer_; }
+
+ // cricket::TurnAuthInterface
+ bool GetKey(absl::string_view username,
+ absl::string_view realm,
+ std::string* key) override {
+ return cricket::ComputeStunCredentialHash(
+ std::string(username), std::string(realm), std::string(username), key);
+ }
+
+ rtc::AsyncPacketSocket* CreatePeerSocket() { return Wrap(peer_); }
+
+ // This method is called by network emulation when a packet
+ // comes from an emulated link.
+ void OnPacketReceived(webrtc::EmulatedIpPacket packet) override;
+
+ // This is called when the TURN server deletes a socket.
+ void Unbind(rtc::SocketAddress address);
+
+ // Unbind all sockets.
+ void Stop();
+
+ private:
+ std::unique_ptr<rtc::Thread> thread_;
+ rtc::SocketAddress client_address_;
+ IceServerConfig ice_config_;
+ EmulatedEndpoint* const client_;
+ EmulatedEndpoint* const peer_;
+ std::unique_ptr<cricket::TurnServer> turn_server_ RTC_GUARDED_BY(&thread_);
+ std::map<rtc::SocketAddress, rtc::AsyncPacketSocket*> sockets_
+ RTC_GUARDED_BY(&thread_);
+
+ // Wraps a EmulatedEndpoint in a AsyncPacketSocket to bridge interaction
+ // with TurnServer. cricket::TurnServer gets ownership of the socket.
+ rtc::AsyncPacketSocket* Wrap(EmulatedEndpoint* endpoint);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_EMULATED_TURN_SERVER_H_
diff --git a/third_party/libwebrtc/test/network/fake_network_socket_server.cc b/third_party/libwebrtc/test/network/fake_network_socket_server.cc
new file mode 100644
index 0000000000..c94c4e372a
--- /dev/null
+++ b/third_party/libwebrtc/test/network/fake_network_socket_server.cc
@@ -0,0 +1,322 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/fake_network_socket_server.h"
+
+#include <algorithm>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/event.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+std::string ToString(const rtc::SocketAddress& addr) {
+ return addr.HostAsURIString() + ":" + std::to_string(addr.port());
+}
+
+} // namespace
+
+// Represents a UDP socket, which will operate with emulated network.
+class FakeNetworkSocket : public rtc::Socket,
+                          public EmulatedNetworkReceiverInterface {
+ public:
+  explicit FakeNetworkSocket(FakeNetworkSocketServer* socket_server,
+                             rtc::Thread* thread);
+  ~FakeNetworkSocket() override;
+
+  // Will be invoked by EmulatedEndpoint to deliver packets into this socket.
+  void OnPacketReceived(EmulatedIpPacket packet) override;
+
+  // rtc::Socket methods:
+  rtc::SocketAddress GetLocalAddress() const override;
+  rtc::SocketAddress GetRemoteAddress() const override;
+  int Bind(const rtc::SocketAddress& addr) override;
+  int Connect(const rtc::SocketAddress& addr) override;
+  int Close() override;
+  int Send(const void* pv, size_t cb) override;
+  int SendTo(const void* pv,
+             size_t cb,
+             const rtc::SocketAddress& addr) override;
+  int Recv(void* pv, size_t cb, int64_t* timestamp) override;
+  int RecvFrom(void* pv,
+               size_t cb,
+               rtc::SocketAddress* paddr,
+               int64_t* timestamp) override;
+  int Listen(int backlog) override;
+  rtc::Socket* Accept(rtc::SocketAddress* paddr) override;
+  int GetError() const override;
+  void SetError(int error) override;
+  ConnState GetState() const override;
+  int GetOption(Option opt, int* value) override;
+  int SetOption(Option opt, int value) override;
+
+ private:
+  FakeNetworkSocketServer* const socket_server_;
+  rtc::Thread* const thread_;
+  EmulatedEndpointImpl* endpoint_ RTC_GUARDED_BY(&thread_) = nullptr;
+  rtc::SocketAddress local_addr_ RTC_GUARDED_BY(&thread_);
+  rtc::SocketAddress remote_addr_ RTC_GUARDED_BY(&thread_);
+  ConnState state_ RTC_GUARDED_BY(&thread_);
+  int error_ RTC_GUARDED_BY(&thread_);
+  std::map<Option, int> options_map_ RTC_GUARDED_BY(&thread_);
+
+  absl::optional<EmulatedIpPacket> pending_ RTC_GUARDED_BY(thread_);
+  rtc::scoped_refptr<PendingTaskSafetyFlag> alive_;
+};
+
+FakeNetworkSocket::FakeNetworkSocket(FakeNetworkSocketServer* socket_server,
+ rtc::Thread* thread)
+ : socket_server_(socket_server),
+ thread_(thread),
+ state_(CS_CLOSED),
+ error_(0),
+ alive_(PendingTaskSafetyFlag::Create()) {}
+
+FakeNetworkSocket::~FakeNetworkSocket() {
+ // Abandon all pending packets.
+ alive_->SetNotAlive();
+
+ Close();
+ socket_server_->Unregister(this);
+}
+
+void FakeNetworkSocket::OnPacketReceived(EmulatedIpPacket packet) {
+ auto task = [this, packet = std::move(packet)]() mutable {
+ RTC_DCHECK_RUN_ON(thread_);
+ if (!endpoint_->Enabled())
+ return;
+ RTC_DCHECK(!pending_);
+ pending_ = std::move(packet);
+ // Note that we expect that this will trigger exactly one call to RecvFrom()
+ // where pending_packet will be read and reset. This call is done without
+ // any thread switch (see AsyncUDPSocket::OnReadEvent) so it's safe to
+ // assume that SignalReadEvent() will block until the packet has been read.
+ SignalReadEvent(this);
+ RTC_DCHECK(!pending_);
+ };
+ thread_->PostTask(SafeTask(alive_, std::move(task)));
+ socket_server_->WakeUp();
+}
+
+
+rtc::SocketAddress FakeNetworkSocket::GetLocalAddress() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return local_addr_;
+}
+
+rtc::SocketAddress FakeNetworkSocket::GetRemoteAddress() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return remote_addr_;
+}
+
+int FakeNetworkSocket::Bind(const rtc::SocketAddress& addr) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_CHECK(local_addr_.IsNil())
+ << "Socket already bound to address: " << ToString(local_addr_);
+ local_addr_ = addr;
+ endpoint_ = socket_server_->GetEndpointNode(local_addr_.ipaddr());
+ if (!endpoint_) {
+ local_addr_.Clear();
+ RTC_LOG(LS_INFO) << "No endpoint for address: " << ToString(addr);
+ error_ = EADDRNOTAVAIL;
+ return 2;
+ }
+ absl::optional<uint16_t> port =
+ endpoint_->BindReceiver(local_addr_.port(), this);
+ if (!port) {
+ local_addr_.Clear();
+ RTC_LOG(LS_INFO) << "Cannot bind to in-use address: " << ToString(addr);
+ error_ = EADDRINUSE;
+ return 1;
+ }
+ local_addr_.SetPort(port.value());
+ return 0;
+}
+
+int FakeNetworkSocket::Connect(const rtc::SocketAddress& addr) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_CHECK(remote_addr_.IsNil())
+ << "Socket already connected to address: " << ToString(remote_addr_);
+ RTC_CHECK(!local_addr_.IsNil())
+ << "Socket have to be bind to some local address";
+ remote_addr_ = addr;
+ state_ = CS_CONNECTED;
+ return 0;
+}
+
+int FakeNetworkSocket::Send(const void* pv, size_t cb) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_CHECK(state_ == CS_CONNECTED) << "Socket cannot send: not connected";
+ return SendTo(pv, cb, remote_addr_);
+}
+
+int FakeNetworkSocket::SendTo(const void* pv,
+ size_t cb,
+ const rtc::SocketAddress& addr) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_CHECK(!local_addr_.IsNil())
+ << "Socket have to be bind to some local address";
+ if (!endpoint_->Enabled()) {
+ error_ = ENETDOWN;
+ return -1;
+ }
+ rtc::CopyOnWriteBuffer packet(static_cast<const uint8_t*>(pv), cb);
+ endpoint_->SendPacket(local_addr_, addr, packet);
+ return cb;
+}
+
+int FakeNetworkSocket::Recv(void* pv, size_t cb, int64_t* timestamp) {
+ rtc::SocketAddress paddr;
+ return RecvFrom(pv, cb, &paddr, timestamp);
+}
+
+// Reads one pending packet. Copies up to `cb` bytes into `pv`
+// and returns the length of the received packet.
+int FakeNetworkSocket::RecvFrom(void* pv,
+                                size_t cb,
+                                rtc::SocketAddress* paddr,
+                                int64_t* timestamp) {
+  RTC_DCHECK_RUN_ON(thread_);
+
+  if (timestamp) {
+    *timestamp = -1;
+  }
+  RTC_CHECK(pending_);
+
+  *paddr = pending_->from;
+  size_t data_read = std::min(cb, pending_->size());
+  memcpy(pv, pending_->cdata(), data_read);
+  if (timestamp) *timestamp = pending_->arrival_time.us();
+
+  // According to the RECV(2) Linux man page a real socket would silently
+  // discard the data that doesn't fit into the provided buffer; we don't
+  // want to hide such an error, so we assert here instead.
+  RTC_CHECK(data_read == pending_->size())
+      << "Too small buffer is provided for socket read. "
+         "Received data size: "
+      << pending_->size() << "; Provided buffer size: " << cb;
+
+  pending_.reset();
+
+  // Per the RECV(2) Linux man page a real socket returns the message
+  // length, not the number of bytes read. In our case the two values
+  // are actually the same.
+  return static_cast<int>(data_read);
+}
+
+int FakeNetworkSocket::Listen(int backlog) {
+ RTC_CHECK(false) << "Listen() isn't valid for SOCK_DGRAM";
+}
+
+rtc::Socket* FakeNetworkSocket::Accept(rtc::SocketAddress* /*paddr*/) {
+ RTC_CHECK(false) << "Accept() isn't valid for SOCK_DGRAM";
+}
+
+int FakeNetworkSocket::Close() {
+ RTC_DCHECK_RUN_ON(thread_);
+ state_ = CS_CLOSED;
+ if (!local_addr_.IsNil()) {
+ endpoint_->UnbindReceiver(local_addr_.port());
+ }
+ local_addr_.Clear();
+ remote_addr_.Clear();
+ return 0;
+}
+
+int FakeNetworkSocket::GetError() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return error_;
+}
+
+void FakeNetworkSocket::SetError(int error) {
+ RTC_DCHECK_RUN_ON(thread_);
+ RTC_CHECK(error == 0);
+ error_ = error;
+}
+
+rtc::Socket::ConnState FakeNetworkSocket::GetState() const {
+ RTC_DCHECK_RUN_ON(thread_);
+ return state_;
+}
+
+int FakeNetworkSocket::GetOption(Option opt, int* value) {
+ RTC_DCHECK_RUN_ON(thread_);
+ auto it = options_map_.find(opt);
+ if (it == options_map_.end()) {
+ return -1;
+ }
+ *value = it->second;
+ return 0;
+}
+
+int FakeNetworkSocket::SetOption(Option opt, int value) {
+ RTC_DCHECK_RUN_ON(thread_);
+ options_map_[opt] = value;
+ return 0;
+}
+
+FakeNetworkSocketServer::FakeNetworkSocketServer(
+ EndpointsContainer* endpoints_container)
+ : endpoints_container_(endpoints_container),
+ wakeup_(/*manual_reset=*/false, /*initially_signaled=*/false) {}
+FakeNetworkSocketServer::~FakeNetworkSocketServer() = default;
+
+EmulatedEndpointImpl* FakeNetworkSocketServer::GetEndpointNode(
+ const rtc::IPAddress& ip) {
+ return endpoints_container_->LookupByLocalAddress(ip);
+}
+
+void FakeNetworkSocketServer::Unregister(FakeNetworkSocket* socket) {
+ MutexLock lock(&lock_);
+ sockets_.erase(absl::c_find(sockets_, socket));
+}
+
+rtc::Socket* FakeNetworkSocketServer::CreateSocket(int family, int type) {
+ RTC_DCHECK(family == AF_INET || family == AF_INET6);
+ // We support only UDP sockets for now.
+ RTC_DCHECK(type == SOCK_DGRAM) << "Only UDP sockets are supported";
+ RTC_DCHECK(thread_) << "must be attached to thread before creating sockets";
+ FakeNetworkSocket* out = new FakeNetworkSocket(this, thread_);
+ {
+ MutexLock lock(&lock_);
+ sockets_.push_back(out);
+ }
+ return out;
+}
+
+void FakeNetworkSocketServer::SetMessageQueue(rtc::Thread* thread) {
+ thread_ = thread;
+}
+
+// Always returns true (if return false, it won't be invoked again...)
+bool FakeNetworkSocketServer::Wait(webrtc::TimeDelta max_wait_duration,
+ bool process_io) {
+ RTC_DCHECK(thread_ == rtc::Thread::Current());
+ if (!max_wait_duration.IsZero())
+ wakeup_.Wait(max_wait_duration);
+
+ return true;
+}
+
+void FakeNetworkSocketServer::WakeUp() {
+ wakeup_.Set();
+}
+
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/fake_network_socket_server.h b/third_party/libwebrtc/test/network/fake_network_socket_server.h
new file mode 100644
index 0000000000..25c85d048a
--- /dev/null
+++ b/third_party/libwebrtc/test/network/fake_network_socket_server.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_FAKE_NETWORK_SOCKET_SERVER_H_
+#define TEST_NETWORK_FAKE_NETWORK_SOCKET_SERVER_H_
+
+#include <set>
+#include <vector>
+
+#include "api/units/timestamp.h"
+#include "rtc_base/event.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/socket_server.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "system_wrappers/include/clock.h"
+#include "test/network/network_emulation.h"
+
+namespace webrtc {
+namespace test {
+class FakeNetworkSocket;
+
+// FakeNetworkSocketServer must outlive any sockets it creates.
+class FakeNetworkSocketServer : public rtc::SocketServer {
+ public:
+ explicit FakeNetworkSocketServer(EndpointsContainer* endpoints_controller);
+ ~FakeNetworkSocketServer() override;
+
+
+ // rtc::SocketFactory methods:
+ rtc::Socket* CreateSocket(int family, int type) override;
+
+ // rtc::SocketServer methods:
+ // Called by the network thread when this server is installed, kicking off the
+ // message handler loop.
+ void SetMessageQueue(rtc::Thread* thread) override;
+ bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override;
+ void WakeUp() override;
+
+ protected:
+ friend class FakeNetworkSocket;
+ EmulatedEndpointImpl* GetEndpointNode(const rtc::IPAddress& ip);
+ void Unregister(FakeNetworkSocket* socket);
+
+ private:
+ const EndpointsContainer* endpoints_container_;
+ rtc::Event wakeup_;
+ rtc::Thread* thread_ = nullptr;
+
+ Mutex lock_;
+ std::vector<FakeNetworkSocket*> sockets_ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_FAKE_NETWORK_SOCKET_SERVER_H_
diff --git a/third_party/libwebrtc/test/network/feedback_generator.cc b/third_party/libwebrtc/test/network/feedback_generator.cc
new file mode 100644
index 0000000000..e339fd87b0
--- /dev/null
+++ b/third_party/libwebrtc/test/network/feedback_generator.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/network/feedback_generator.h"
+
+#include "absl/memory/memory.h"
+#include "api/transport/network_types.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+FeedbackGeneratorImpl::FeedbackGeneratorImpl(
+ FeedbackGeneratorImpl::Config config)
+ : conf_(config),
+ net_(TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault),
+ send_link_{new SimulatedNetwork(conf_.send_link)},
+ ret_link_{new SimulatedNetwork(conf_.return_link)},
+ route_(this,
+ net_.CreateRoute(
+ {net_.CreateEmulatedNode(absl::WrapUnique(send_link_))}),
+ net_.CreateRoute(
+ {net_.CreateEmulatedNode(absl::WrapUnique(ret_link_))})) {}
+
+Timestamp FeedbackGeneratorImpl::Now() {
+ return net_.Now();
+}
+
+void FeedbackGeneratorImpl::Sleep(TimeDelta duration) {
+ net_.time_controller()->AdvanceTime(duration);
+}
+
+void FeedbackGeneratorImpl::SendPacket(size_t size) {
+ SentPacket sent;
+ sent.send_time = Now();
+ sent.size = DataSize::Bytes(size);
+ sent.sequence_number = sequence_number_++;
+ sent_packets_.push(sent);
+ route_.SendRequest(size, sent);
+}
+
+std::vector<TransportPacketsFeedback> FeedbackGeneratorImpl::PopFeedback() {
+ std::vector<TransportPacketsFeedback> ret;
+ ret.swap(feedback_);
+ return ret;
+}
+
+void FeedbackGeneratorImpl::SetSendConfig(BuiltInNetworkBehaviorConfig config) {
+ conf_.send_link = config;
+ send_link_->SetConfig(conf_.send_link);
+}
+
+void FeedbackGeneratorImpl::SetReturnConfig(
+ BuiltInNetworkBehaviorConfig config) {
+ conf_.return_link = config;
+ ret_link_->SetConfig(conf_.return_link);
+}
+
+void FeedbackGeneratorImpl::SetSendLinkCapacity(DataRate capacity) {
+ conf_.send_link.link_capacity_kbps = capacity.kbps<int>();
+ send_link_->SetConfig(conf_.send_link);
+}
+
+void FeedbackGeneratorImpl::OnRequest(SentPacket packet,
+ Timestamp arrival_time) {
+ PacketResult result;
+ result.sent_packet = packet;
+ result.receive_time = arrival_time;
+ received_packets_.push_back(result);
+ Timestamp first_recv = received_packets_.front().receive_time;
+ if (Now() - first_recv > conf_.feedback_interval) {
+ route_.SendResponse(conf_.feedback_packet_size.bytes<size_t>(),
+ std::move(received_packets_));
+ received_packets_ = {};
+ }
+}
+
+void FeedbackGeneratorImpl::OnResponse(std::vector<PacketResult> packet_results,
+ Timestamp arrival_time) {
+ TransportPacketsFeedback feedback;
+ feedback.feedback_time = arrival_time;
+ std::vector<PacketResult>::const_iterator received_packet_iterator =
+ packet_results.begin();
+ while (received_packet_iterator != packet_results.end()) {
+ RTC_DCHECK(!sent_packets_.empty() &&
+ sent_packets_.front().sequence_number <=
+ received_packet_iterator->sent_packet.sequence_number)
+ << "reordering not implemented";
+ if (sent_packets_.front().sequence_number <
+ received_packet_iterator->sent_packet.sequence_number) {
+ // Packet lost.
+ PacketResult lost;
+ lost.sent_packet = sent_packets_.front();
+ feedback.packet_feedbacks.push_back(lost);
+ }
+ if (sent_packets_.front().sequence_number ==
+ received_packet_iterator->sent_packet.sequence_number) {
+ feedback.packet_feedbacks.push_back(*received_packet_iterator);
+ ++received_packet_iterator;
+ }
+ sent_packets_.pop();
+ }
+ feedback_.push_back(feedback);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/feedback_generator.h b/third_party/libwebrtc/test/network/feedback_generator.h
new file mode 100644
index 0000000000..ecd4597d3f
--- /dev/null
+++ b/third_party/libwebrtc/test/network/feedback_generator.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_NETWORK_FEEDBACK_GENERATOR_H_
+#define TEST_NETWORK_FEEDBACK_GENERATOR_H_
+
+#include <cstdint>
+#include <queue>
+#include <utility>
+#include <vector>
+
+#include "api/transport/network_types.h"
+#include "api/transport/test/feedback_generator_interface.h"
+#include "call/simulated_network.h"
+#include "test/network/network_emulation.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+class FeedbackGeneratorImpl
+ : public FeedbackGenerator,
+ public TwoWayFakeTrafficRoute<SentPacket, std::vector<PacketResult>>::
+ TrafficHandlerInterface {
+ public:
+ explicit FeedbackGeneratorImpl(Config config);
+ Timestamp Now() override;
+ void Sleep(TimeDelta duration) override;
+ void SendPacket(size_t size) override;
+ std::vector<TransportPacketsFeedback> PopFeedback() override;
+
+ void SetSendConfig(BuiltInNetworkBehaviorConfig config) override;
+ void SetReturnConfig(BuiltInNetworkBehaviorConfig config) override;
+
+ void SetSendLinkCapacity(DataRate capacity) override;
+
+ void OnRequest(SentPacket packet, Timestamp arrival_time) override;
+ void OnResponse(std::vector<PacketResult> packet_results,
+ Timestamp arrival_time) override;
+
+ private:
+ Config conf_;
+ ::webrtc::test::NetworkEmulationManagerImpl net_;
+ SimulatedNetwork* const send_link_;
+ SimulatedNetwork* const ret_link_;
+ TwoWayFakeTrafficRoute<SentPacket, std::vector<PacketResult>> route_;
+
+ std::queue<SentPacket> sent_packets_;
+ std::vector<PacketResult> received_packets_;
+ std::vector<TransportPacketsFeedback> feedback_;
+ int64_t sequence_number_ = 1;
+};
+} // namespace webrtc
+#endif // TEST_NETWORK_FEEDBACK_GENERATOR_H_
diff --git a/third_party/libwebrtc/test/network/feedback_generator_unittest.cc b/third_party/libwebrtc/test/network/feedback_generator_unittest.cc
new file mode 100644
index 0000000000..9a577bea00
--- /dev/null
+++ b/third_party/libwebrtc/test/network/feedback_generator_unittest.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/transport/test/create_feedback_generator.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+TEST(FeedbackGeneratorTest, ReportsFeedbackForSentPackets) {
+ size_t kPacketSize = 1000;
+ auto gen = CreateFeedbackGenerator(FeedbackGenerator::Config());
+ for (int i = 0; i < 10; ++i) {
+ gen->SendPacket(kPacketSize);
+ gen->Sleep(TimeDelta::Millis(50));
+ }
+ auto feedback_list = gen->PopFeedback();
+ EXPECT_GT(feedback_list.size(), 0u);
+ for (const auto& feedback : feedback_list) {
+ EXPECT_GT(feedback.packet_feedbacks.size(), 0u);
+ for (const auto& packet : feedback.packet_feedbacks) {
+ EXPECT_EQ(packet.sent_packet.size.bytes<size_t>(), kPacketSize);
+ }
+ }
+}
+
+TEST(FeedbackGeneratorTest, FeedbackIncludesLostPackets) {
+ size_t kPacketSize = 1000;
+ auto gen = CreateFeedbackGenerator(FeedbackGenerator::Config());
+ BuiltInNetworkBehaviorConfig send_config_with_loss;
+ send_config_with_loss.loss_percent = 50;
+ gen->SetSendConfig(send_config_with_loss);
+ for (int i = 0; i < 20; ++i) {
+ gen->SendPacket(kPacketSize);
+ gen->Sleep(TimeDelta::Millis(5));
+ }
+ auto feedback_list = gen->PopFeedback();
+ ASSERT_GT(feedback_list.size(), 0u);
+ EXPECT_NEAR(feedback_list[0].LostWithSendInfo().size(),
+ feedback_list[0].ReceivedWithSendInfo().size(), 2);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/g3doc/index.md b/third_party/libwebrtc/test/network/g3doc/index.md
new file mode 100644
index 0000000000..c82b56445e
--- /dev/null
+++ b/third_party/libwebrtc/test/network/g3doc/index.md
@@ -0,0 +1,137 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'titovartem' reviewed: '2021-03-01'} *-->
+
+# Network Emulation Framework
+
+[TOC]
+
+## Disclaimer
+
+This documentation explains the implementation details of the Network Emulation
+Framework. The framework's public APIs are located in:
+
+* [`/api/test/network_emulation_manager.h`](https://source.chromium.org/search?q=%2Fapi%2Ftest%2Fnetwork_emulation_manager.h)
+* [`/api/test/create_network_emulation_manager.h`](https://source.chromium.org/search?q=%2Fapi%2Ftest%2Fcreate_network_emulation_manager.h)
+* [`/api/test/network_emulation/network_emulation_interfaces.h`](https://source.chromium.org/search?q=%2Fapi%2Ftest%2Fnetwork_emulation%2Fnetwork_emulation_interfaces.h)
+* [`/api/test/simulated_network.h`](https://source.chromium.org/search?q=%2Fapi%2Ftest%2Fsimulated_network.h)
+
+## Overview
+
+Network Emulation Framework provides an ability to emulate network behavior
+between different clients, including a WebRTC PeerConnection client. To
+configure network behavior, the user can choose different options:
+
+* Use a predefined implementation that can be configured with parameters such
+  as packet loss, bandwidth, delay, etc.
+* Use a custom implementation
+
+Conceptually the framework provides the ability to define multiple endpoints and
+routes used to connect them. All network related entities are created and
+managed by a single factory class, `webrtc::NetworkEmulationManager`, which is
+implemented by `webrtc::test::NetworkEmulationManagerImpl` and can work in two
+modes:
+
+* Real time
+* Simulated time
+
+The manager has a dedicated task queue which pipes all packets through all
+network routes from senders to receivers. This task queue's behavior is
+determined by `webrtc::TimeController`, which operates in either real-time or
+simulated-time mode.
+
+The network operates on the IP level and supports only UDP for now.
+
+## Abstractions
+
+The framework contains the following public abstractions:
+
+* `webrtc::NetworkBehaviorInterface` - defines how the emulated network should
+  behave. It operates on the packet-metadata level and is responsible for
+  telling which packet has to be delivered to the next receiver at which time.
+
+* `webrtc::EmulatedIpPacket` - represents a single packet that can be sent or
+ received via emulated network. It has source and destination address and
+ payload to transfer.
+
+* `webrtc::EmulatedNetworkReceiverInterface` - generic packet receiver
+ interface.
+
+* `webrtc::EmulatedEndpoint` - primary user facing abstraction of the
+  framework. It represents a network interface on the client's machine. It has
+ own unique IP address and can be used to send and receive packets.
+
+ `EmulatedEndpoint` implements `EmulatedNetworkReceiverInterface` to receive
+ packets from the network and provides an API to send packets to the network
+ and API to bind other `EmulatedNetworkReceiverInterface` which will be able
+  to receive packets from the endpoint. The `EmulatedEndpoint` interface has
+  only one implementation: `webrtc::test::EmulatedEndpointImpl`.
+
+* `webrtc::EmulatedNetworkNode` - represents single network in the real world,
+ like a 3G network between peers, or Wi-Fi for one peer and LTE for another.
+  Each `EmulatedNetworkNode` is a single-direction connection, and to form a
+  bidirectional connection between endpoints two nodes should be used.
+  Multiple nodes can be joined into a chain emulating a network path from one
+  peer to another.
+
+ In public API this class is forward declared and fully accessible only by
+ the framework implementation.
+
+  Internally it consists of two parts: `LinkEmulation`, which is responsible
+  for the behavior of the current `EmulatedNetworkNode`, and
+  `NetworkRouterNode`, which is responsible for routing packets to the next
+
+* `webrtc::EmulatedRoute` - represents a single route from one network
+  interface on one device to another network interface on another device.
+
+ In public API this class is forward declared and fully accessible only by
+ the framework implementation.
+
+  It contains the start and end endpoints and an ordered list of
+  `EmulatedNetworkNode`s which form the single-directional route between them.
+
+The framework has also the following private abstractions:
+
+* `webrtc::test::NetworkRouterNode` - an `EmulatedNetworkReceiverInterface`
+ that can route incoming packets to the next receiver based on internal IP
+ routing table.
+
+* `webrtc::test::LinkEmulation` - an `EmulatedNetworkReceiverInterface` that
+ can emulate network leg behavior via `webrtc::NetworkBehaviorInterface`
+ interface.
+
+For integrating with `webrtc::PeerConnection` there are helper abstractions:
+
+* `webrtc::EmulatedNetworkManagerInterface` which is implemented by
+ `webrtc::test::EmulatedNetworkManager` and provides `rtc::Thread` and
+ `rtc::NetworkManager` for WebRTC to use as network thread for
+ `PeerConnection` and for `cricket::BasicPortAllocator`.
+
+  The implementation represents framework endpoints as `rtc::Network` to WebRTC.
+
+## Architecture
+
+Let's take a look at how the framework's abstractions are connected to each other.
+
+When the user wants to set up an emulated network, first of all, they should
+create an instance of `NetworkEmulationManager` using the
+`webrtc::CreateNetworkEmulationManager(...)` API. Then the user should use the
+manager to create at least one `EmulatedEndpoint` for each client. After the
+endpoints, the user should create the required `EmulatedNetworkNode`s and, with
+the help of the manager, chain them into `EmulatedRoute`s connecting the
+
+Here is a visual overview of the emulated network architecture:
+
+![Architecture](network_emulation_framework.png "Architecture")
+
+When the network is hooked into `PeerConnection`, it is done through the
+network thread and `NetworkManager`. In the network thread a custom
+`rtc::SocketServer` is provided: `webrtc::test::FakeNetworkSocketServer`. This
+custom socket server will construct custom sockets
+(`webrtc::test::FakeNetworkSocket`), which internally bind themselves to the
+required endpoint. All packet processing inside a socket has to be done on the
+`PeerConnection`'s network thread. When a packet is going from
+`PeerConnection` to the network, it is already coming from the network thread,
+and when it is coming from the emulated network, the switch from the Network
+Emulation Framework's internal task queue to the `PeerConnection`'s network
+
+![Network Injection](network_injection_into_peer_connection.png "Network Injection")
diff --git a/third_party/libwebrtc/test/network/g3doc/network_emulation_framework.png b/third_party/libwebrtc/test/network/g3doc/network_emulation_framework.png
new file mode 100644
index 0000000000..afec47773f
--- /dev/null
+++ b/third_party/libwebrtc/test/network/g3doc/network_emulation_framework.png
Binary files differ
diff --git a/third_party/libwebrtc/test/network/g3doc/network_injection_into_peer_connection.png b/third_party/libwebrtc/test/network/g3doc/network_injection_into_peer_connection.png
new file mode 100644
index 0000000000..c9e3bf8baf
--- /dev/null
+++ b/third_party/libwebrtc/test/network/g3doc/network_injection_into_peer_connection.png
Binary files differ
diff --git a/third_party/libwebrtc/test/network/network_emulation.cc b/third_party/libwebrtc/test/network/network_emulation.cc
new file mode 100644
index 0000000000..f1c9ca80dd
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation.cc
@@ -0,0 +1,767 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/network_emulation.h"
+
+#include <algorithm>
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/sequence_checker.h"
+#include "api/test/network_emulation/network_emulation_interfaces.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+EmulatedNetworkOutgoingStats GetOverallOutgoingStats(
+ const std::map<rtc::IPAddress, EmulatedNetworkOutgoingStats>&
+ outgoing_stats,
+ EmulatedNetworkStatsGatheringMode mode) {
+ EmulatedNetworkOutgoingStatsBuilder builder(mode);
+ for (const auto& entry : outgoing_stats) {
+ builder.AddOutgoingStats(entry.second);
+ }
+ return builder.Build();
+}
+
+EmulatedNetworkIncomingStats GetOverallIncomingStats(
+ const std::map<rtc::IPAddress, EmulatedNetworkIncomingStats>&
+ incoming_stats,
+ EmulatedNetworkStatsGatheringMode mode) {
+ EmulatedNetworkIncomingStatsBuilder builder(mode);
+ for (const auto& entry : incoming_stats) {
+ builder.AddIncomingStats(entry.second);
+ }
+ return builder.Build();
+}
+
+} // namespace
+
+EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : stats_gathering_mode_(stats_gathering_mode) {
+ sequence_checker_.Detach();
+}
+
+void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent(Timestamp sent_time,
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_CHECK_GE(packet_size, DataSize::Zero());
+ if (stats_.first_packet_sent_time.IsInfinite()) {
+ stats_.first_packet_sent_time = sent_time;
+ stats_.first_sent_packet_size = packet_size;
+ }
+ stats_.last_packet_sent_time = sent_time;
+ stats_.packets_sent++;
+ stats_.bytes_sent += packet_size;
+ if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) {
+ stats_.sent_packets_size.AddSample(packet_size.bytes());
+ }
+}
+
+void EmulatedNetworkOutgoingStatsBuilder::AddOutgoingStats(
+ const EmulatedNetworkOutgoingStats& stats) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stats_.packets_sent += stats.packets_sent;
+ stats_.bytes_sent += stats.bytes_sent;
+ stats_.sent_packets_size.AddSamples(stats.sent_packets_size);
+ if (stats_.first_packet_sent_time > stats.first_packet_sent_time) {
+ stats_.first_packet_sent_time = stats.first_packet_sent_time;
+ stats_.first_sent_packet_size = stats.first_sent_packet_size;
+ }
+ if (stats_.last_packet_sent_time < stats.last_packet_sent_time) {
+ stats_.last_packet_sent_time = stats.last_packet_sent_time;
+ }
+}
+
+EmulatedNetworkOutgoingStats EmulatedNetworkOutgoingStatsBuilder::Build()
+ const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return stats_;
+}
+
+EmulatedNetworkIncomingStatsBuilder::EmulatedNetworkIncomingStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : stats_gathering_mode_(stats_gathering_mode) {
+ sequence_checker_.Detach();
+}
+
+void EmulatedNetworkIncomingStatsBuilder::OnPacketDropped(
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stats_.packets_discarded_no_receiver++;
+ stats_.bytes_discarded_no_receiver += packet_size;
+ if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) {
+ stats_.packets_discarded_no_receiver_size.AddSample(packet_size.bytes());
+ }
+}
+
+void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived(
+ Timestamp received_time,
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ RTC_CHECK_GE(packet_size, DataSize::Zero());
+ if (stats_.first_packet_received_time.IsInfinite()) {
+ stats_.first_packet_received_time = received_time;
+ stats_.first_received_packet_size = packet_size;
+ }
+ stats_.last_packet_received_time = received_time;
+ stats_.packets_received++;
+ stats_.bytes_received += packet_size;
+ if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) {
+ stats_.received_packets_size.AddSample(packet_size.bytes());
+ }
+}
+
+void EmulatedNetworkIncomingStatsBuilder::AddIncomingStats(
+ const EmulatedNetworkIncomingStats& stats) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stats_.packets_received += stats.packets_received;
+ stats_.bytes_received += stats.bytes_received;
+ stats_.received_packets_size.AddSamples(stats.received_packets_size);
+ stats_.packets_discarded_no_receiver += stats.packets_discarded_no_receiver;
+ stats_.bytes_discarded_no_receiver += stats.bytes_discarded_no_receiver;
+ stats_.packets_discarded_no_receiver_size.AddSamples(
+ stats.packets_discarded_no_receiver_size);
+ if (stats_.first_packet_received_time > stats.first_packet_received_time) {
+ stats_.first_packet_received_time = stats.first_packet_received_time;
+ stats_.first_received_packet_size = stats.first_received_packet_size;
+ }
+ if (stats_.last_packet_received_time < stats.last_packet_received_time) {
+ stats_.last_packet_received_time = stats.last_packet_received_time;
+ }
+}
+
+EmulatedNetworkIncomingStats EmulatedNetworkIncomingStatsBuilder::Build()
+ const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return stats_;
+}
+
+EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : stats_gathering_mode_(stats_gathering_mode) {
+ sequence_checker_.Detach();
+}
+
+EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder(
+ rtc::IPAddress local_ip,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : stats_gathering_mode_(stats_gathering_mode) {
+ local_addresses_.push_back(local_ip);
+ sequence_checker_.Detach();
+}
+
+void EmulatedNetworkStatsBuilder::OnPacketSent(Timestamp queued_time,
+ Timestamp sent_time,
+ rtc::IPAddress destination_ip,
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) {
+ sent_packets_queue_wait_time_us_.AddSample((sent_time - queued_time).us());
+ }
+ auto it = outgoing_stats_per_destination_.find(destination_ip);
+ if (it == outgoing_stats_per_destination_.end()) {
+ outgoing_stats_per_destination_
+ .emplace(destination_ip,
+ std::make_unique<EmulatedNetworkOutgoingStatsBuilder>(
+ stats_gathering_mode_))
+ .first->second->OnPacketSent(sent_time, packet_size);
+ } else {
+ it->second->OnPacketSent(sent_time, packet_size);
+ }
+}
+
+void EmulatedNetworkStatsBuilder::OnPacketDropped(rtc::IPAddress source_ip,
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = incoming_stats_per_source_.find(source_ip);
+ if (it == incoming_stats_per_source_.end()) {
+ incoming_stats_per_source_
+ .emplace(source_ip,
+ std::make_unique<EmulatedNetworkIncomingStatsBuilder>(
+ stats_gathering_mode_))
+ .first->second->OnPacketDropped(packet_size);
+ } else {
+ it->second->OnPacketDropped(packet_size);
+ }
+}
+
+void EmulatedNetworkStatsBuilder::OnPacketReceived(Timestamp received_time,
+ rtc::IPAddress source_ip,
+ DataSize packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ auto it = incoming_stats_per_source_.find(source_ip);
+ if (it == incoming_stats_per_source_.end()) {
+ incoming_stats_per_source_
+ .emplace(source_ip,
+ std::make_unique<EmulatedNetworkIncomingStatsBuilder>(
+ stats_gathering_mode_))
+ .first->second->OnPacketReceived(received_time, packet_size);
+ } else {
+ it->second->OnPacketReceived(received_time, packet_size);
+ }
+}
+
+void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats(
+ const EmulatedNetworkStats& stats) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+
+ // Append IPs from other endpoints stats to the builder.
+ for (const rtc::IPAddress& addr : stats.local_addresses) {
+ local_addresses_.push_back(addr);
+ }
+
+ sent_packets_queue_wait_time_us_.AddSamples(
+ stats.sent_packets_queue_wait_time_us);
+
+ // Add outgoing stats from other endpoints to the builder.
+ for (const auto& entry : stats.outgoing_stats_per_destination) {
+ auto it = outgoing_stats_per_destination_.find(entry.first);
+ if (it == outgoing_stats_per_destination_.end()) {
+ outgoing_stats_per_destination_
+ .emplace(entry.first,
+ std::make_unique<EmulatedNetworkOutgoingStatsBuilder>(
+ stats_gathering_mode_))
+ .first->second->AddOutgoingStats(entry.second);
+ } else {
+ it->second->AddOutgoingStats(entry.second);
+ }
+ }
+
+ // Add incoming stats from other endpoints to the builder.
+ for (const auto& entry : stats.incoming_stats_per_source) {
+ auto it = incoming_stats_per_source_.find(entry.first);
+ if (it == incoming_stats_per_source_.end()) {
+ incoming_stats_per_source_
+ .emplace(entry.first,
+ std::make_unique<EmulatedNetworkIncomingStatsBuilder>(
+ stats_gathering_mode_))
+ .first->second->AddIncomingStats(entry.second);
+ } else {
+ it->second->AddIncomingStats(entry.second);
+ }
+ }
+}
+
+EmulatedNetworkStats EmulatedNetworkStatsBuilder::Build() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ std::map<rtc::IPAddress, EmulatedNetworkOutgoingStats> outgoing_stats;
+ for (const auto& entry : outgoing_stats_per_destination_) {
+ outgoing_stats.emplace(entry.first, entry.second->Build());
+ }
+ std::map<rtc::IPAddress, EmulatedNetworkIncomingStats> incoming_stats;
+ for (const auto& entry : incoming_stats_per_source_) {
+ incoming_stats.emplace(entry.first, entry.second->Build());
+ }
+ return EmulatedNetworkStats{
+ .local_addresses = local_addresses_,
+ .overall_outgoing_stats =
+ GetOverallOutgoingStats(outgoing_stats, stats_gathering_mode_),
+ .overall_incoming_stats =
+ GetOverallIncomingStats(incoming_stats, stats_gathering_mode_),
+ .outgoing_stats_per_destination = std::move(outgoing_stats),
+ .incoming_stats_per_source = std::move(incoming_stats),
+ .sent_packets_queue_wait_time_us = sent_packets_queue_wait_time_us_};
+}
+
+EmulatedNetworkNodeStatsBuilder::EmulatedNetworkNodeStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : stats_gathering_mode_(stats_gathering_mode) {
+ sequence_checker_.Detach();
+}
+
+void EmulatedNetworkNodeStatsBuilder::AddPacketTransportTime(
+ TimeDelta time,
+ size_t packet_size) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) {
+ stats_.packet_transport_time.AddSample(time.ms<double>());
+ stats_.size_to_packet_transport_time.AddSample(packet_size /
+ time.ms<double>());
+ }
+}
+
+void EmulatedNetworkNodeStatsBuilder::AddEmulatedNetworkNodeStats(
+ const EmulatedNetworkNodeStats& stats) {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ stats_.packet_transport_time.AddSamples(stats.packet_transport_time);
+ stats_.size_to_packet_transport_time.AddSamples(
+ stats.size_to_packet_transport_time);
+}
+
+EmulatedNetworkNodeStats EmulatedNetworkNodeStatsBuilder::Build() const {
+ RTC_DCHECK_RUN_ON(&sequence_checker_);
+ return stats_;
+}
+
+void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) {
+ task_queue_->PostTask([this, packet = std::move(packet)]() mutable {
+ RTC_DCHECK_RUN_ON(task_queue_);
+
+ uint64_t packet_id = next_packet_id_++;
+ bool sent = network_behavior_->EnqueuePacket(PacketInFlightInfo(
+ packet.ip_packet_size(), packet.arrival_time.us(), packet_id));
+ if (sent) {
+ packets_.emplace_back(StoredPacket{.id = packet_id,
+ .sent_time = clock_->CurrentTime(),
+ .packet = std::move(packet),
+ .removed = false});
+ }
+ if (process_task_.Running())
+ return;
+ absl::optional<int64_t> next_time_us =
+ network_behavior_->NextDeliveryTimeUs();
+ if (!next_time_us)
+ return;
+ Timestamp current_time = clock_->CurrentTime();
+ process_task_ = RepeatingTaskHandle::DelayedStart(
+ task_queue_->Get(),
+ std::max(TimeDelta::Zero(),
+ Timestamp::Micros(*next_time_us) - current_time),
+ [this]() {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ Timestamp current_time = clock_->CurrentTime();
+ Process(current_time);
+ absl::optional<int64_t> next_time_us =
+ network_behavior_->NextDeliveryTimeUs();
+ if (!next_time_us) {
+ process_task_.Stop();
+ return TimeDelta::Zero(); // This is ignored.
+ }
+ RTC_DCHECK_GE(*next_time_us, current_time.us());
+ return Timestamp::Micros(*next_time_us) - current_time;
+ });
+ });
+}
+
+EmulatedNetworkNodeStats LinkEmulation::stats() const {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ return stats_builder_.Build();
+}
+
+void LinkEmulation::Process(Timestamp at_time) {
+ std::vector<PacketDeliveryInfo> delivery_infos =
+ network_behavior_->DequeueDeliverablePackets(at_time.us());
+ for (PacketDeliveryInfo& delivery_info : delivery_infos) {
+ StoredPacket* packet = nullptr;
+ for (auto& stored_packet : packets_) {
+ if (stored_packet.id == delivery_info.packet_id) {
+ packet = &stored_packet;
+ break;
+ }
+ }
+ RTC_CHECK(packet);
+ RTC_DCHECK(!packet->removed);
+ packet->removed = true;
+ stats_builder_.AddPacketTransportTime(
+ clock_->CurrentTime() - packet->sent_time,
+ packet->packet.ip_packet_size());
+
+ if (delivery_info.receive_time_us != PacketDeliveryInfo::kNotReceived) {
+ packet->packet.arrival_time =
+ Timestamp::Micros(delivery_info.receive_time_us);
+ receiver_->OnPacketReceived(std::move(packet->packet));
+ }
+ while (!packets_.empty() && packets_.front().removed) {
+ packets_.pop_front();
+ }
+ }
+}
+
+NetworkRouterNode::NetworkRouterNode(rtc::TaskQueue* task_queue)
+ : task_queue_(task_queue) {}
+
+void NetworkRouterNode::OnPacketReceived(EmulatedIpPacket packet) {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ if (watcher_) {
+ watcher_(packet);
+ }
+ if (filter_) {
+ if (!filter_(packet))
+ return;
+ }
+ auto receiver_it = routing_.find(packet.to.ipaddr());
+ if (receiver_it == routing_.end()) {
+ if (default_receiver_.has_value()) {
+ (*default_receiver_)->OnPacketReceived(std::move(packet));
+ }
+ return;
+ }
+ RTC_CHECK(receiver_it != routing_.end());
+
+ receiver_it->second->OnPacketReceived(std::move(packet));
+}
+
+void NetworkRouterNode::SetReceiver(
+ const rtc::IPAddress& dest_ip,
+ EmulatedNetworkReceiverInterface* receiver) {
+ task_queue_->PostTask([=] {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ EmulatedNetworkReceiverInterface* cur_receiver = routing_[dest_ip];
+ RTC_CHECK(cur_receiver == nullptr || cur_receiver == receiver)
+ << "Routing for dest_ip=" << dest_ip.ToString() << " already exists";
+ routing_[dest_ip] = receiver;
+ });
+}
+
+void NetworkRouterNode::RemoveReceiver(const rtc::IPAddress& dest_ip) {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ routing_.erase(dest_ip);
+}
+
+void NetworkRouterNode::SetDefaultReceiver(
+ EmulatedNetworkReceiverInterface* receiver) {
+ task_queue_->PostTask([=] {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ if (default_receiver_.has_value()) {
+ RTC_CHECK_EQ(*default_receiver_, receiver)
+ << "Router already default receiver";
+ }
+ default_receiver_ = receiver;
+ });
+}
+
+void NetworkRouterNode::RemoveDefaultReceiver() {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ default_receiver_ = absl::nullopt;
+}
+
+void NetworkRouterNode::SetWatcher(
+ std::function<void(const EmulatedIpPacket&)> watcher) {
+ task_queue_->PostTask([=] {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ watcher_ = watcher;
+ });
+}
+
+void NetworkRouterNode::SetFilter(
+ std::function<bool(const EmulatedIpPacket&)> filter) {
+ task_queue_->PostTask([=] {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ filter_ = filter;
+ });
+}
+
+EmulatedNetworkNode::EmulatedNetworkNode(
+ Clock* clock,
+ rtc::TaskQueue* task_queue,
+ std::unique_ptr<NetworkBehaviorInterface> network_behavior,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : router_(task_queue),
+ link_(clock,
+ task_queue,
+ std::move(network_behavior),
+ &router_,
+ stats_gathering_mode) {}
+
+void EmulatedNetworkNode::OnPacketReceived(EmulatedIpPacket packet) {
+ link_.OnPacketReceived(std::move(packet));
+}
+
+EmulatedNetworkNodeStats EmulatedNetworkNode::stats() const {
+ return link_.stats();
+}
+
+void EmulatedNetworkNode::CreateRoute(
+ const rtc::IPAddress& receiver_ip,
+ std::vector<EmulatedNetworkNode*> nodes,
+ EmulatedNetworkReceiverInterface* receiver) {
+ RTC_CHECK(!nodes.empty());
+ for (size_t i = 0; i + 1 < nodes.size(); ++i)
+ nodes[i]->router()->SetReceiver(receiver_ip, nodes[i + 1]);
+ nodes.back()->router()->SetReceiver(receiver_ip, receiver);
+}
+
+void EmulatedNetworkNode::ClearRoute(const rtc::IPAddress& receiver_ip,
+ std::vector<EmulatedNetworkNode*> nodes) {
+ for (EmulatedNetworkNode* node : nodes)
+ node->router()->RemoveReceiver(receiver_ip);
+}
+
+EmulatedNetworkNode::~EmulatedNetworkNode() = default;
+
+EmulatedEndpointImpl::Options::Options(
+ uint64_t id,
+ const rtc::IPAddress& ip,
+ const EmulatedEndpointConfig& config,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : id(id),
+ ip(ip),
+ stats_gathering_mode(stats_gathering_mode),
+ type(config.type),
+ allow_send_packet_with_different_source_ip(
+ config.allow_send_packet_with_different_source_ip),
+ allow_receive_packets_with_different_dest_ip(
+ config.allow_receive_packets_with_different_dest_ip),
+ log_name(ip.ToString() + " (" + config.name.value_or("") + ")") {}
+
+EmulatedEndpointImpl::EmulatedEndpointImpl(const Options& options,
+ bool is_enabled,
+ rtc::TaskQueue* task_queue,
+ Clock* clock)
+ : options_(options),
+ is_enabled_(is_enabled),
+ clock_(clock),
+ task_queue_(task_queue),
+ router_(task_queue_),
+ next_port_(kFirstEphemeralPort),
+ stats_builder_(options_.ip, options_.stats_gathering_mode) {
+ constexpr int kIPv4NetworkPrefixLength = 24;
+ constexpr int kIPv6NetworkPrefixLength = 64;
+
+ int prefix_length = 0;
+ if (options_.ip.family() == AF_INET) {
+ prefix_length = kIPv4NetworkPrefixLength;
+ } else if (options_.ip.family() == AF_INET6) {
+ prefix_length = kIPv6NetworkPrefixLength;
+ }
+ rtc::IPAddress prefix = TruncateIP(options_.ip, prefix_length);
+ network_ = std::make_unique<rtc::Network>(
+ options_.ip.ToString(), "Endpoint id=" + std::to_string(options_.id),
+ prefix, prefix_length, options_.type);
+ network_->AddIP(options_.ip);
+
+ enabled_state_checker_.Detach();
+ RTC_LOG(LS_INFO) << "Created emulated endpoint " << options_.log_name
+ << "; id=" << options_.id;
+}
+EmulatedEndpointImpl::~EmulatedEndpointImpl() = default;
+
+uint64_t EmulatedEndpointImpl::GetId() const {
+ return options_.id;
+}
+
+void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from,
+ const rtc::SocketAddress& to,
+ rtc::CopyOnWriteBuffer packet_data,
+ uint16_t application_overhead) {
+ if (!options_.allow_send_packet_with_different_source_ip) {
+ RTC_CHECK(from.ipaddr() == options_.ip);
+ }
+ EmulatedIpPacket packet(from, to, std::move(packet_data),
+ clock_->CurrentTime(), application_overhead);
+ task_queue_->PostTask([this, packet = std::move(packet)]() mutable {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ stats_builder_.OnPacketSent(packet.arrival_time, clock_->CurrentTime(),
+ packet.to.ipaddr(),
+ DataSize::Bytes(packet.ip_packet_size()));
+
+ if (packet.to.ipaddr() == options_.ip) {
+ OnPacketReceived(std::move(packet));
+ } else {
+ router_.OnPacketReceived(std::move(packet));
+ }
+ });
+}
+
+absl::optional<uint16_t> EmulatedEndpointImpl::BindReceiver(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver) {
+ return BindReceiverInternal(desired_port, receiver, /*is_one_shot=*/false);
+}
+
+absl::optional<uint16_t> EmulatedEndpointImpl::BindOneShotReceiver(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver) {
+ return BindReceiverInternal(desired_port, receiver, /*is_one_shot=*/true);
+}
+
+absl::optional<uint16_t> EmulatedEndpointImpl::BindReceiverInternal(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver,
+ bool is_one_shot) {
+ MutexLock lock(&receiver_lock_);
+ uint16_t port = desired_port;
+ if (port == 0) {
+    // Because the client can specify its own port, next_port_ may already be
+    // in use, so we need to find the next available port.
+ int ports_pool_size =
+ std::numeric_limits<uint16_t>::max() - kFirstEphemeralPort + 1;
+ for (int i = 0; i < ports_pool_size; ++i) {
+ uint16_t next_port = NextPort();
+ if (port_to_receiver_.find(next_port) == port_to_receiver_.end()) {
+ port = next_port;
+ break;
+ }
+ }
+ }
+ RTC_CHECK(port != 0) << "Can't find free port for receiver in endpoint "
+ << options_.log_name << "; id=" << options_.id;
+ bool result =
+ port_to_receiver_.insert({port, {receiver, is_one_shot}}).second;
+ if (!result) {
+ RTC_LOG(LS_INFO) << "Can't bind receiver to used port " << desired_port
+ << " in endpoint " << options_.log_name
+ << "; id=" << options_.id;
+ return absl::nullopt;
+ }
+ RTC_LOG(LS_INFO) << "New receiver is binded to endpoint " << options_.log_name
+ << "; id=" << options_.id << " on port " << port;
+ return port;
+}
+
+uint16_t EmulatedEndpointImpl::NextPort() {
+ uint16_t out = next_port_;
+ if (next_port_ == std::numeric_limits<uint16_t>::max()) {
+ next_port_ = kFirstEphemeralPort;
+ } else {
+ next_port_++;
+ }
+ return out;
+}
+
+void EmulatedEndpointImpl::UnbindReceiver(uint16_t port) {
+ MutexLock lock(&receiver_lock_);
+ RTC_LOG(LS_INFO) << "Receiver is removed on port " << port
+ << " from endpoint " << options_.log_name
+ << "; id=" << options_.id;
+ port_to_receiver_.erase(port);
+}
+
+void EmulatedEndpointImpl::BindDefaultReceiver(
+ EmulatedNetworkReceiverInterface* receiver) {
+ MutexLock lock(&receiver_lock_);
+ RTC_CHECK(!default_receiver_.has_value())
+ << "Endpoint " << options_.log_name << "; id=" << options_.id
+ << " already has default receiver";
+ RTC_LOG(LS_INFO) << "Default receiver is binded to endpoint "
+ << options_.log_name << "; id=" << options_.id;
+ default_receiver_ = receiver;
+}
+
+void EmulatedEndpointImpl::UnbindDefaultReceiver() {
+ MutexLock lock(&receiver_lock_);
+ RTC_LOG(LS_INFO) << "Default receiver is removed from endpoint "
+ << options_.log_name << "; id=" << options_.id;
+ default_receiver_ = absl::nullopt;
+}
+
+rtc::IPAddress EmulatedEndpointImpl::GetPeerLocalAddress() const {
+ return options_.ip;
+}
+
+void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ if (!options_.allow_receive_packets_with_different_dest_ip) {
+ RTC_CHECK(packet.to.ipaddr() == options_.ip)
+ << "Routing error: wrong destination endpoint. Packet.to.ipaddr()=: "
+ << packet.to.ipaddr().ToString()
+ << "; Receiver options_.ip=" << options_.ip.ToString();
+ }
+ MutexLock lock(&receiver_lock_);
+ stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet.from.ipaddr(),
+ DataSize::Bytes(packet.ip_packet_size()));
+ auto it = port_to_receiver_.find(packet.to.port());
+ if (it == port_to_receiver_.end()) {
+ if (default_receiver_.has_value()) {
+ (*default_receiver_)->OnPacketReceived(std::move(packet));
+ return;
+ }
+    // It can happen that the remote peer closed the connection, but there are
+    // still some packets going to it. This can happen during the peer
+    // connection close process: one peer closed, the second is still sending.
+ RTC_LOG(LS_INFO) << "Drop packet: no receiver registered in "
+ << options_.log_name << "; id=" << options_.id
+ << " on port " << packet.to.port()
+ << ". Packet source: " << packet.from.ToString();
+ stats_builder_.OnPacketDropped(packet.from.ipaddr(),
+ DataSize::Bytes(packet.ip_packet_size()));
+ return;
+ }
+ // Endpoint holds lock during packet processing to ensure that a call to
+ // UnbindReceiver followed by a delete of the receiver cannot race with this
+ // call to OnPacketReceived.
+ it->second.receiver->OnPacketReceived(std::move(packet));
+
+ if (it->second.is_one_shot) {
+ port_to_receiver_.erase(it);
+ }
+}
+
+void EmulatedEndpointImpl::Enable() {
+ RTC_DCHECK_RUN_ON(&enabled_state_checker_);
+ RTC_CHECK(!is_enabled_);
+ is_enabled_ = true;
+}
+
+void EmulatedEndpointImpl::Disable() {
+ RTC_DCHECK_RUN_ON(&enabled_state_checker_);
+ RTC_CHECK(is_enabled_);
+ is_enabled_ = false;
+}
+
+bool EmulatedEndpointImpl::Enabled() const {
+ RTC_DCHECK_RUN_ON(&enabled_state_checker_);
+ return is_enabled_;
+}
+
+EmulatedNetworkStats EmulatedEndpointImpl::stats() const {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ return stats_builder_.Build();
+}
+
+EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress(
+ const rtc::IPAddress& local_ip) const {
+ for (auto* endpoint : endpoints_) {
+ rtc::IPAddress peer_local_address = endpoint->GetPeerLocalAddress();
+ if (peer_local_address == local_ip) {
+ return endpoint;
+ }
+ }
+ RTC_CHECK(false) << "No network found for address" << local_ip.ToString();
+}
+
+EndpointsContainer::EndpointsContainer(
+ const std::vector<EmulatedEndpointImpl*>& endpoints,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : endpoints_(endpoints), stats_gathering_mode_(stats_gathering_mode) {}
+
+bool EndpointsContainer::HasEndpoint(EmulatedEndpointImpl* endpoint) const {
+ for (auto* e : endpoints_) {
+ if (e->GetId() == endpoint->GetId()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+std::vector<std::unique_ptr<rtc::Network>>
+EndpointsContainer::GetEnabledNetworks() const {
+ std::vector<std::unique_ptr<rtc::Network>> networks;
+ for (auto* endpoint : endpoints_) {
+ if (endpoint->Enabled()) {
+ networks.emplace_back(
+ std::make_unique<rtc::Network>(endpoint->network()));
+ }
+ }
+ return networks;
+}
+
+std::vector<EmulatedEndpoint*> EndpointsContainer::GetEndpoints() const {
+ return std::vector<EmulatedEndpoint*>(endpoints_.begin(), endpoints_.end());
+}
+
+EmulatedNetworkStats EndpointsContainer::GetStats() const {
+ EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode_);
+ for (auto* endpoint : endpoints_) {
+ stats_builder.AddEmulatedNetworkStats(endpoint->stats());
+ }
+ return stats_builder.Build();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/network_emulation.h b/third_party/libwebrtc/test/network/network_emulation.h
new file mode 100644
index 0000000000..dffabafa7c
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation.h
@@ -0,0 +1,467 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_NETWORK_EMULATION_H_
+#define TEST_NETWORK_NETWORK_EMULATION_H_
+
+#include <cstdint>
+#include <deque>
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/sequence_checker.h"
+#include "api/test/network_emulation/network_emulation_interfaces.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/simulated_network.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/network.h"
+#include "rtc_base/network_constants.h"
+#include "rtc_base/socket_address.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+
+// All methods of EmulatedNetworkOutgoingStatsBuilder have to be used on a
+// single thread. It may be created on another thread.
+class EmulatedNetworkOutgoingStatsBuilder {
+ public:
+ explicit EmulatedNetworkOutgoingStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ void OnPacketSent(Timestamp sent_time, DataSize packet_size);
+
+ void AddOutgoingStats(const EmulatedNetworkOutgoingStats& stats);
+
+ EmulatedNetworkOutgoingStats Build() const;
+
+ private:
+ const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ EmulatedNetworkOutgoingStats stats_ RTC_GUARDED_BY(sequence_checker_);
+};
+
+// All methods of EmulatedNetworkIncomingStatsBuilder have to be used on a
+// single thread. It may be created on another thread.
+class EmulatedNetworkIncomingStatsBuilder {
+ public:
+ explicit EmulatedNetworkIncomingStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ void OnPacketDropped(DataSize packet_size);
+
+ void OnPacketReceived(Timestamp received_time, DataSize packet_size);
+
+  // Adds stats collected from other endpoints to the builder.
+ void AddIncomingStats(const EmulatedNetworkIncomingStats& stats);
+
+ EmulatedNetworkIncomingStats Build() const;
+
+ private:
+ const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ EmulatedNetworkIncomingStats stats_ RTC_GUARDED_BY(sequence_checker_);
+};
+
+// All methods of EmulatedNetworkStatsBuilder have to be used on a single
+// thread. It may be created on another thread.
+class EmulatedNetworkStatsBuilder {
+ public:
+ explicit EmulatedNetworkStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+ explicit EmulatedNetworkStatsBuilder(
+ rtc::IPAddress local_ip,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ void OnPacketSent(Timestamp queued_time,
+ Timestamp sent_time,
+ rtc::IPAddress destination_ip,
+ DataSize packet_size);
+
+ void OnPacketDropped(rtc::IPAddress source_ip, DataSize packet_size);
+
+ void OnPacketReceived(Timestamp received_time,
+ rtc::IPAddress source_ip,
+ DataSize packet_size);
+
+ void AddEmulatedNetworkStats(const EmulatedNetworkStats& stats);
+
+ EmulatedNetworkStats Build() const;
+
+ private:
+ const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ std::vector<rtc::IPAddress> local_addresses_
+ RTC_GUARDED_BY(sequence_checker_);
+ SamplesStatsCounter sent_packets_queue_wait_time_us_;
+ std::map<rtc::IPAddress, std::unique_ptr<EmulatedNetworkOutgoingStatsBuilder>>
+ outgoing_stats_per_destination_ RTC_GUARDED_BY(sequence_checker_);
+ std::map<rtc::IPAddress, std::unique_ptr<EmulatedNetworkIncomingStatsBuilder>>
+ incoming_stats_per_source_ RTC_GUARDED_BY(sequence_checker_);
+};
+
+// All methods of EmulatedNetworkNodeStatsBuilder have to be used on a
+// single thread. It may be created on another thread.
+class EmulatedNetworkNodeStatsBuilder {
+ public:
+ explicit EmulatedNetworkNodeStatsBuilder(
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ void AddPacketTransportTime(TimeDelta time, size_t packet_size);
+
+ void AddEmulatedNetworkNodeStats(const EmulatedNetworkNodeStats& stats);
+
+ EmulatedNetworkNodeStats Build() const;
+
+ private:
+ const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
+
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+ EmulatedNetworkNodeStats stats_ RTC_GUARDED_BY(sequence_checker_);
+};
+
+class LinkEmulation : public EmulatedNetworkReceiverInterface {
+ public:
+ LinkEmulation(Clock* clock,
+ rtc::TaskQueue* task_queue,
+ std::unique_ptr<NetworkBehaviorInterface> network_behavior,
+ EmulatedNetworkReceiverInterface* receiver,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : clock_(clock),
+ task_queue_(task_queue),
+ network_behavior_(std::move(network_behavior)),
+ receiver_(receiver),
+ stats_builder_(stats_gathering_mode) {}
+ void OnPacketReceived(EmulatedIpPacket packet) override;
+
+ EmulatedNetworkNodeStats stats() const;
+
+ private:
+ struct StoredPacket {
+ uint64_t id;
+ Timestamp sent_time;
+ EmulatedIpPacket packet;
+ bool removed;
+ };
+ void Process(Timestamp at_time) RTC_RUN_ON(task_queue_);
+
+ Clock* const clock_;
+ rtc::TaskQueue* const task_queue_;
+ const std::unique_ptr<NetworkBehaviorInterface> network_behavior_
+ RTC_GUARDED_BY(task_queue_);
+ EmulatedNetworkReceiverInterface* const receiver_;
+
+ RepeatingTaskHandle process_task_ RTC_GUARDED_BY(task_queue_);
+ std::deque<StoredPacket> packets_ RTC_GUARDED_BY(task_queue_);
+ uint64_t next_packet_id_ RTC_GUARDED_BY(task_queue_) = 1;
+
+ EmulatedNetworkNodeStatsBuilder stats_builder_ RTC_GUARDED_BY(task_queue_);
+};
+
+// Represents a component responsible for routing packets based on their IP
+// address. All possible routes have to be set explicitly before a packet for
+// the desired destination is seen for the first time. If the route is unknown
+// the packet will be silently dropped.
+class NetworkRouterNode : public EmulatedNetworkReceiverInterface {
+ public:
+ explicit NetworkRouterNode(rtc::TaskQueue* task_queue);
+
+ void OnPacketReceived(EmulatedIpPacket packet) override;
+ void SetReceiver(const rtc::IPAddress& dest_ip,
+ EmulatedNetworkReceiverInterface* receiver);
+ void RemoveReceiver(const rtc::IPAddress& dest_ip);
+  // Sets a default receiver that will be used for all incoming packets for
+  // which there is no specific receiver bound to their destination port.
+ void SetDefaultReceiver(EmulatedNetworkReceiverInterface* receiver);
+ void RemoveDefaultReceiver();
+ void SetWatcher(std::function<void(const EmulatedIpPacket&)> watcher);
+ void SetFilter(std::function<bool(const EmulatedIpPacket&)> filter);
+
+ private:
+ rtc::TaskQueue* const task_queue_;
+ absl::optional<EmulatedNetworkReceiverInterface*> default_receiver_
+ RTC_GUARDED_BY(task_queue_);
+ std::map<rtc::IPAddress, EmulatedNetworkReceiverInterface*> routing_
+ RTC_GUARDED_BY(task_queue_);
+ std::function<void(const EmulatedIpPacket&)> watcher_
+ RTC_GUARDED_BY(task_queue_);
+ std::function<bool(const EmulatedIpPacket&)> filter_
+ RTC_GUARDED_BY(task_queue_);
+};
+
+// Represents node in the emulated network. Nodes can be connected with each
+// other to form different networks with different behavior. The behavior of
+// the node itself is determined by a concrete implementation of
+// NetworkBehaviorInterface that is provided on construction.
+class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface {
+ public:
+ // Creates node based on `network_behavior`. The specified `packet_overhead`
+ // is added to the size of each packet in the information provided to
+ // `network_behavior`.
+ // `task_queue` is used to process packets and to forward the packets when
+ // they are ready.
+ EmulatedNetworkNode(
+ Clock* clock,
+ rtc::TaskQueue* task_queue,
+ std::unique_ptr<NetworkBehaviorInterface> network_behavior,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+ ~EmulatedNetworkNode() override;
+
+ EmulatedNetworkNode(const EmulatedNetworkNode&) = delete;
+ EmulatedNetworkNode& operator=(const EmulatedNetworkNode&) = delete;
+
+ void OnPacketReceived(EmulatedIpPacket packet) override;
+
+ LinkEmulation* link() { return &link_; }
+ NetworkRouterNode* router() { return &router_; }
+ EmulatedNetworkNodeStats stats() const;
+
+ // Creates a route for the given receiver_ip over all the given nodes to the
+ // given receiver.
+ static void CreateRoute(const rtc::IPAddress& receiver_ip,
+ std::vector<EmulatedNetworkNode*> nodes,
+ EmulatedNetworkReceiverInterface* receiver);
+ static void ClearRoute(const rtc::IPAddress& receiver_ip,
+ std::vector<EmulatedNetworkNode*> nodes);
+
+ private:
+ NetworkRouterNode router_;
+ LinkEmulation link_;
+};
+
+// Represents single network interface on the device.
+// It will be used as sender from socket side to send data to the network and
+// will act as packet receiver from emulated network side to receive packets
+// from other EmulatedNetworkNodes.
+class EmulatedEndpointImpl : public EmulatedEndpoint {
+ public:
+ struct Options {
+ Options(uint64_t id,
+ const rtc::IPAddress& ip,
+ const EmulatedEndpointConfig& config,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ // TODO(titovartem) check if we can remove id.
+ uint64_t id;
+ // Endpoint local IP address.
+ rtc::IPAddress ip;
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode;
+ rtc::AdapterType type;
+    // Allow the endpoint to send packets with a source IP address different
+    // from the endpoint's own IP address. If false, the endpoint will crash
+    // on any attempt to send such a packet.
+ bool allow_send_packet_with_different_source_ip;
+    // Allow the endpoint to receive packets with a destination IP address
+    // different from the endpoint's own IP address. If false, the endpoint
+    // will crash if such a packet arrives.
+ bool allow_receive_packets_with_different_dest_ip;
+ // Name of the endpoint used for logging purposes.
+ std::string log_name;
+ };
+
+ EmulatedEndpointImpl(const Options& options,
+ bool is_enabled,
+ rtc::TaskQueue* task_queue,
+ Clock* clock);
+ ~EmulatedEndpointImpl() override;
+
+ uint64_t GetId() const;
+
+ NetworkRouterNode* router() { return &router_; }
+
+ void SendPacket(const rtc::SocketAddress& from,
+ const rtc::SocketAddress& to,
+ rtc::CopyOnWriteBuffer packet_data,
+ uint16_t application_overhead = 0) override;
+
+ absl::optional<uint16_t> BindReceiver(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver) override;
+ // Binds a receiver, and automatically removes the binding after first call to
+ // OnPacketReceived.
+ absl::optional<uint16_t> BindOneShotReceiver(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver);
+ void UnbindReceiver(uint16_t port) override;
+ void BindDefaultReceiver(EmulatedNetworkReceiverInterface* receiver) override;
+ void UnbindDefaultReceiver() override;
+
+ rtc::IPAddress GetPeerLocalAddress() const override;
+
+ // Will be called to deliver packet into endpoint from network node.
+ void OnPacketReceived(EmulatedIpPacket packet) override;
+
+ void Enable();
+ void Disable();
+ bool Enabled() const;
+
+ const rtc::Network& network() const { return *network_.get(); }
+
+ EmulatedNetworkStats stats() const;
+
+ private:
+ struct ReceiverBinding {
+ EmulatedNetworkReceiverInterface* receiver;
+ bool is_one_shot;
+ };
+
+ absl::optional<uint16_t> BindReceiverInternal(
+ uint16_t desired_port,
+ EmulatedNetworkReceiverInterface* receiver,
+ bool is_one_shot);
+
+ static constexpr uint16_t kFirstEphemeralPort = 49152;
+ uint16_t NextPort() RTC_EXCLUSIVE_LOCKS_REQUIRED(receiver_lock_);
+
+ Mutex receiver_lock_;
+ RTC_NO_UNIQUE_ADDRESS SequenceChecker enabled_state_checker_;
+
+ const Options options_;
+ bool is_enabled_ RTC_GUARDED_BY(enabled_state_checker_);
+ Clock* const clock_;
+ rtc::TaskQueue* const task_queue_;
+ std::unique_ptr<rtc::Network> network_;
+ NetworkRouterNode router_;
+
+ uint16_t next_port_ RTC_GUARDED_BY(receiver_lock_);
+ absl::optional<EmulatedNetworkReceiverInterface*> default_receiver_
+ RTC_GUARDED_BY(receiver_lock_);
+ std::map<uint16_t, ReceiverBinding> port_to_receiver_
+ RTC_GUARDED_BY(receiver_lock_);
+
+ EmulatedNetworkStatsBuilder stats_builder_ RTC_GUARDED_BY(task_queue_);
+};
+
+class EmulatedRoute {
+ public:
+ EmulatedRoute(EmulatedEndpointImpl* from,
+ std::vector<EmulatedNetworkNode*> via_nodes,
+ EmulatedEndpointImpl* to,
+ bool is_default)
+ : from(from),
+ via_nodes(std::move(via_nodes)),
+ to(to),
+ active(true),
+ is_default(is_default) {}
+
+ EmulatedEndpointImpl* from;
+ std::vector<EmulatedNetworkNode*> via_nodes;
+ EmulatedEndpointImpl* to;
+ bool active;
+ bool is_default;
+};
+
+// This object is immutable and so thread safe.
+class EndpointsContainer {
+ public:
+ EndpointsContainer(const std::vector<EmulatedEndpointImpl*>& endpoints,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode);
+
+ EmulatedEndpointImpl* LookupByLocalAddress(
+ const rtc::IPAddress& local_ip) const;
+ bool HasEndpoint(EmulatedEndpointImpl* endpoint) const;
+ // Returns list of networks for enabled endpoints. Caller takes ownership of
+ // returned rtc::Network objects.
+ std::vector<std::unique_ptr<rtc::Network>> GetEnabledNetworks() const;
+ std::vector<EmulatedEndpoint*> GetEndpoints() const;
+ EmulatedNetworkStats GetStats() const;
+
+ private:
+ const std::vector<EmulatedEndpointImpl*> endpoints_;
+ const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
+};
+
+template <typename FakePacketType>
+class FakePacketRoute : public EmulatedNetworkReceiverInterface {
+ public:
+ FakePacketRoute(EmulatedRoute* route,
+ std::function<void(FakePacketType, Timestamp)> action)
+ : route_(route),
+ action_(std::move(action)),
+ send_addr_(route_->from->GetPeerLocalAddress(), 0),
+ recv_addr_(route_->to->GetPeerLocalAddress(),
+ *route_->to->BindReceiver(0, this)) {}
+
+ ~FakePacketRoute() { route_->to->UnbindReceiver(recv_addr_.port()); }
+
+ void SendPacket(size_t size, FakePacketType packet) {
+ RTC_CHECK_GE(size, sizeof(int));
+ sent_.emplace(next_packet_id_, packet);
+ rtc::CopyOnWriteBuffer buf(size);
+ reinterpret_cast<int*>(buf.MutableData())[0] = next_packet_id_++;
+ route_->from->SendPacket(send_addr_, recv_addr_, buf);
+ }
+
+ void OnPacketReceived(EmulatedIpPacket packet) override {
+ int packet_id = reinterpret_cast<const int*>(packet.data.data())[0];
+ action_(std::move(sent_[packet_id]), packet.arrival_time);
+ sent_.erase(packet_id);
+ }
+
+ private:
+ EmulatedRoute* const route_;
+ const std::function<void(FakePacketType, Timestamp)> action_;
+ const rtc::SocketAddress send_addr_;
+ const rtc::SocketAddress recv_addr_;
+ int next_packet_id_ = 0;
+ std::map<int, FakePacketType> sent_;
+};
+
+template <typename RequestPacketType, typename ResponsePacketType>
+class TwoWayFakeTrafficRoute {
+ public:
+ class TrafficHandlerInterface {
+ public:
+ virtual void OnRequest(RequestPacketType, Timestamp) = 0;
+ virtual void OnResponse(ResponsePacketType, Timestamp) = 0;
+ virtual ~TrafficHandlerInterface() = default;
+ };
+ TwoWayFakeTrafficRoute(TrafficHandlerInterface* handler,
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route)
+ : handler_(handler),
+ request_handler_{send_route,
+ [&](RequestPacketType packet, Timestamp arrival_time) {
+ handler_->OnRequest(std::move(packet), arrival_time);
+ }},
+ response_handler_{
+ ret_route, [&](ResponsePacketType packet, Timestamp arrival_time) {
+ handler_->OnResponse(std::move(packet), arrival_time);
+ }} {}
+ void SendRequest(size_t size, RequestPacketType packet) {
+ request_handler_.SendPacket(size, std::move(packet));
+ }
+ void SendResponse(size_t size, ResponsePacketType packet) {
+ response_handler_.SendPacket(size, std::move(packet));
+ }
+
+ private:
+ TrafficHandlerInterface* handler_;
+ FakePacketRoute<RequestPacketType> request_handler_;
+ FakePacketRoute<ResponsePacketType> response_handler_;
+};
+} // namespace webrtc
+
+#endif // TEST_NETWORK_NETWORK_EMULATION_H_
diff --git a/third_party/libwebrtc/test/network/network_emulation_manager.cc b/third_party/libwebrtc/test/network/network_emulation_manager.cc
new file mode 100644
index 0000000000..97c0bc1ba8
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation_manager.cc
@@ -0,0 +1,373 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/network_emulation_manager.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "call/simulated_network.h"
+#include "test/network/emulated_turn_server.h"
+#include "test/network/traffic_route.h"
+#include "test/time_controller/real_time_controller.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+// uint32_t representation of 192.168.0.0 address
+constexpr uint32_t kMinIPv4Address = 0xC0A80000;
+// uint32_t representation of 192.168.255.255 address
+constexpr uint32_t kMaxIPv4Address = 0xC0A8FFFF;
+
+std::unique_ptr<TimeController> CreateTimeController(TimeMode mode) {
+ switch (mode) {
+ case TimeMode::kRealTime:
+ return std::make_unique<RealTimeController>();
+ case TimeMode::kSimulated:
+ // Using an offset of 100000 to get nice fixed width and readable
+ // timestamps in typical test scenarios.
+ const Timestamp kSimulatedStartTime = Timestamp::Seconds(100000);
+ return std::make_unique<GlobalSimulatedTimeController>(
+ kSimulatedStartTime);
+ }
+}
+} // namespace
+
+NetworkEmulationManagerImpl::NetworkEmulationManagerImpl(
+ TimeMode mode,
+ EmulatedNetworkStatsGatheringMode stats_gathering_mode)
+ : time_mode_(mode),
+ stats_gathering_mode_(stats_gathering_mode),
+ time_controller_(CreateTimeController(mode)),
+ clock_(time_controller_->GetClock()),
+ next_node_id_(1),
+ next_ip4_address_(kMinIPv4Address),
+ task_queue_(time_controller_->GetTaskQueueFactory()->CreateTaskQueue(
+ "NetworkEmulation",
+ TaskQueueFactory::Priority::NORMAL)) {}
+
+// TODO(srte): Ensure that any pending task that must be run for consistency
+// (such as stats collection tasks) are not cancelled when the task queue is
+// destroyed.
+NetworkEmulationManagerImpl::~NetworkEmulationManagerImpl() {
+ for (auto& turn_server : turn_servers_) {
+ turn_server->Stop();
+ }
+}
+
+EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode(
+ BuiltInNetworkBehaviorConfig config,
+ uint64_t random_seed) {
+ return CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(config, random_seed));
+}
+
+EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode(
+ std::unique_ptr<NetworkBehaviorInterface> network_behavior) {
+ auto node = std::make_unique<EmulatedNetworkNode>(
+ clock_, &task_queue_, std::move(network_behavior), stats_gathering_mode_);
+ EmulatedNetworkNode* out = node.get();
+ task_queue_.PostTask([this, node = std::move(node)]() mutable {
+ network_nodes_.push_back(std::move(node));
+ });
+ return out;
+}
+
+NetworkEmulationManager::SimulatedNetworkNode::Builder
+NetworkEmulationManagerImpl::NodeBuilder() {
+ return SimulatedNetworkNode::Builder(this);
+}
+
+EmulatedEndpointImpl* NetworkEmulationManagerImpl::CreateEndpoint(
+ EmulatedEndpointConfig config) {
+ absl::optional<rtc::IPAddress> ip = config.ip;
+ if (!ip) {
+ switch (config.generated_ip_family) {
+ case EmulatedEndpointConfig::IpAddressFamily::kIpv4:
+ ip = GetNextIPv4Address();
+ RTC_CHECK(ip) << "All auto generated IPv4 addresses exhausted";
+ break;
+ case EmulatedEndpointConfig::IpAddressFamily::kIpv6:
+ ip = GetNextIPv4Address();
+ RTC_CHECK(ip) << "All auto generated IPv6 addresses exhausted";
+ ip = ip->AsIPv6Address();
+ break;
+ }
+ }
+
+ bool res = used_ip_addresses_.insert(*ip).second;
+ RTC_CHECK(res) << "IP=" << ip->ToString() << " already in use";
+ auto node = std::make_unique<EmulatedEndpointImpl>(
+ EmulatedEndpointImpl::Options(next_node_id_++, *ip, config,
+ stats_gathering_mode_),
+ config.start_as_enabled, &task_queue_, clock_);
+ EmulatedEndpointImpl* out = node.get();
+ endpoints_.push_back(std::move(node));
+ return out;
+}
+
+void NetworkEmulationManagerImpl::EnableEndpoint(EmulatedEndpoint* endpoint) {
+ EmulatedNetworkManager* network_manager =
+ endpoint_to_network_manager_[endpoint];
+ RTC_CHECK(network_manager);
+ network_manager->EnableEndpoint(static_cast<EmulatedEndpointImpl*>(endpoint));
+}
+
+void NetworkEmulationManagerImpl::DisableEndpoint(EmulatedEndpoint* endpoint) {
+ EmulatedNetworkManager* network_manager =
+ endpoint_to_network_manager_[endpoint];
+ RTC_CHECK(network_manager);
+ network_manager->DisableEndpoint(
+ static_cast<EmulatedEndpointImpl*>(endpoint));
+}
+
+EmulatedRoute* NetworkEmulationManagerImpl::CreateRoute(
+ EmulatedEndpoint* from,
+ const std::vector<EmulatedNetworkNode*>& via_nodes,
+ EmulatedEndpoint* to) {
+  // Because an endpoint has no send node by default, at least one node must
+  // be provided here.
+ RTC_CHECK(!via_nodes.empty());
+
+ static_cast<EmulatedEndpointImpl*>(from)->router()->SetReceiver(
+ to->GetPeerLocalAddress(), via_nodes[0]);
+ EmulatedNetworkNode* cur_node = via_nodes[0];
+ for (size_t i = 1; i < via_nodes.size(); ++i) {
+ cur_node->router()->SetReceiver(to->GetPeerLocalAddress(), via_nodes[i]);
+ cur_node = via_nodes[i];
+ }
+ cur_node->router()->SetReceiver(to->GetPeerLocalAddress(), to);
+
+ std::unique_ptr<EmulatedRoute> route = std::make_unique<EmulatedRoute>(
+ static_cast<EmulatedEndpointImpl*>(from), std::move(via_nodes),
+ static_cast<EmulatedEndpointImpl*>(to), /*is_default=*/false);
+ EmulatedRoute* out = route.get();
+ routes_.push_back(std::move(route));
+ return out;
+}
+
+EmulatedRoute* NetworkEmulationManagerImpl::CreateRoute(
+ const std::vector<EmulatedNetworkNode*>& via_nodes) {
+ EmulatedEndpoint* from = CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpoint* to = CreateEndpoint(EmulatedEndpointConfig());
+ return CreateRoute(from, via_nodes, to);
+}
+
+EmulatedRoute* NetworkEmulationManagerImpl::CreateDefaultRoute(
+ EmulatedEndpoint* from,
+ const std::vector<EmulatedNetworkNode*>& via_nodes,
+ EmulatedEndpoint* to) {
+  // Because an endpoint has no send node by default, at least one node must
+  // be provided here.
+ RTC_CHECK(!via_nodes.empty());
+
+ static_cast<EmulatedEndpointImpl*>(from)->router()->SetDefaultReceiver(
+ via_nodes[0]);
+ EmulatedNetworkNode* cur_node = via_nodes[0];
+ for (size_t i = 1; i < via_nodes.size(); ++i) {
+ cur_node->router()->SetDefaultReceiver(via_nodes[i]);
+ cur_node = via_nodes[i];
+ }
+ cur_node->router()->SetDefaultReceiver(to);
+
+ std::unique_ptr<EmulatedRoute> route = std::make_unique<EmulatedRoute>(
+ static_cast<EmulatedEndpointImpl*>(from), std::move(via_nodes),
+ static_cast<EmulatedEndpointImpl*>(to), /*is_default=*/true);
+ EmulatedRoute* out = route.get();
+ routes_.push_back(std::move(route));
+ return out;
+}
+
+void NetworkEmulationManagerImpl::ClearRoute(EmulatedRoute* route) {
+ RTC_CHECK(route->active) << "Route already cleared";
+ task_queue_.SendTask([route]() {
+ // Remove receiver from intermediate nodes.
+ for (auto* node : route->via_nodes) {
+ if (route->is_default) {
+ node->router()->RemoveDefaultReceiver();
+ } else {
+ node->router()->RemoveReceiver(route->to->GetPeerLocalAddress());
+ }
+ }
+ // Remove destination endpoint from source endpoint's router.
+ if (route->is_default) {
+ route->from->router()->RemoveDefaultReceiver();
+ } else {
+ route->from->router()->RemoveReceiver(route->to->GetPeerLocalAddress());
+ }
+
+ route->active = false;
+ });
+}
+
+TcpMessageRoute* NetworkEmulationManagerImpl::CreateTcpRoute(
+ EmulatedRoute* send_route,
+ EmulatedRoute* ret_route) {
+ auto tcp_route = std::make_unique<TcpMessageRouteImpl>(
+ clock_, task_queue_.Get(), send_route, ret_route);
+ auto* route_ptr = tcp_route.get();
+ task_queue_.PostTask([this, tcp_route = std::move(tcp_route)]() mutable {
+ tcp_message_routes_.push_back(std::move(tcp_route));
+ });
+ return route_ptr;
+}
+
+CrossTrafficRoute* NetworkEmulationManagerImpl::CreateCrossTrafficRoute(
+ const std::vector<EmulatedNetworkNode*>& via_nodes) {
+ RTC_CHECK(!via_nodes.empty());
+ EmulatedEndpointImpl* endpoint = CreateEndpoint(EmulatedEndpointConfig());
+
+ // Setup a route via specified nodes.
+ EmulatedNetworkNode* cur_node = via_nodes[0];
+ for (size_t i = 1; i < via_nodes.size(); ++i) {
+ cur_node->router()->SetReceiver(endpoint->GetPeerLocalAddress(),
+ via_nodes[i]);
+ cur_node = via_nodes[i];
+ }
+ cur_node->router()->SetReceiver(endpoint->GetPeerLocalAddress(), endpoint);
+
+ std::unique_ptr<CrossTrafficRoute> traffic_route =
+ std::make_unique<CrossTrafficRouteImpl>(clock_, via_nodes[0], endpoint);
+ CrossTrafficRoute* out = traffic_route.get();
+ traffic_routes_.push_back(std::move(traffic_route));
+ return out;
+}
+
+CrossTrafficGenerator* NetworkEmulationManagerImpl::StartCrossTraffic(
+ std::unique_ptr<CrossTrafficGenerator> generator) {
+ CrossTrafficGenerator* out = generator.get();
+ task_queue_.PostTask([this, generator = std::move(generator)]() mutable {
+ auto* generator_ptr = generator.get();
+
+ auto repeating_task_handle =
+ RepeatingTaskHandle::Start(task_queue_.Get(), [this, generator_ptr] {
+ generator_ptr->Process(Now());
+ return generator_ptr->GetProcessInterval();
+ });
+
+ cross_traffics_.push_back(CrossTrafficSource(
+ std::move(generator), std::move(repeating_task_handle)));
+ });
+ return out;
+}
+
+void NetworkEmulationManagerImpl::StopCrossTraffic(
+ CrossTrafficGenerator* generator) {
+ task_queue_.PostTask([=]() {
+ auto it = std::find_if(cross_traffics_.begin(), cross_traffics_.end(),
+ [=](const CrossTrafficSource& el) {
+ return el.first.get() == generator;
+ });
+ it->second.Stop();
+ cross_traffics_.erase(it);
+ });
+}
+
+EmulatedNetworkManagerInterface*
+NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface(
+ const std::vector<EmulatedEndpoint*>& endpoints) {
+ std::vector<EmulatedEndpointImpl*> endpoint_impls;
+ endpoint_impls.reserve(endpoints.size());
+ for (EmulatedEndpoint* endpoint : endpoints) {
+ endpoint_impls.push_back(static_cast<EmulatedEndpointImpl*>(endpoint));
+ }
+ auto endpoints_container = std::make_unique<EndpointsContainer>(
+ endpoint_impls, stats_gathering_mode_);
+ auto network_manager = std::make_unique<EmulatedNetworkManager>(
+ time_controller_.get(), &task_queue_, endpoints_container.get());
+ for (auto* endpoint : endpoints) {
+ // Associate endpoint with network manager.
+ bool insertion_result =
+ endpoint_to_network_manager_.insert({endpoint, network_manager.get()})
+ .second;
+ RTC_CHECK(insertion_result)
+ << "Endpoint ip=" << endpoint->GetPeerLocalAddress().ToString()
+ << " is already used for another network";
+ }
+
+ EmulatedNetworkManagerInterface* out = network_manager.get();
+
+ endpoints_containers_.push_back(std::move(endpoints_container));
+ network_managers_.push_back(std::move(network_manager));
+ return out;
+}
+
+void NetworkEmulationManagerImpl::GetStats(
+ rtc::ArrayView<EmulatedEndpoint* const> endpoints,
+ std::function<void(EmulatedNetworkStats)> stats_callback) {
+ task_queue_.PostTask([endpoints, stats_callback,
+ stats_gathering_mode = stats_gathering_mode_]() {
+ EmulatedNetworkStatsBuilder stats_builder(stats_gathering_mode);
+ for (auto* endpoint : endpoints) {
+      // It's safe to cast here because EmulatedEndpointImpl is the only
+      // implementation of EmulatedEndpoint, since only it has access to the
+      // EmulatedEndpoint constructor.
+ auto endpoint_impl = static_cast<EmulatedEndpointImpl*>(endpoint);
+ stats_builder.AddEmulatedNetworkStats(endpoint_impl->stats());
+ }
+ stats_callback(stats_builder.Build());
+ });
+}
+
+void NetworkEmulationManagerImpl::GetStats(
+ rtc::ArrayView<EmulatedNetworkNode* const> nodes,
+ std::function<void(EmulatedNetworkNodeStats)> stats_callback) {
+ task_queue_.PostTask(
+ [nodes, stats_callback, stats_gathering_mode = stats_gathering_mode_]() {
+ EmulatedNetworkNodeStatsBuilder stats_builder(stats_gathering_mode);
+ for (auto* node : nodes) {
+ stats_builder.AddEmulatedNetworkNodeStats(node->stats());
+ }
+ stats_callback(stats_builder.Build());
+ });
+}
+
+absl::optional<rtc::IPAddress>
+NetworkEmulationManagerImpl::GetNextIPv4Address() {
+ uint32_t addresses_count = kMaxIPv4Address - kMinIPv4Address;
+ for (uint32_t i = 0; i < addresses_count; i++) {
+ rtc::IPAddress ip(next_ip4_address_);
+ if (next_ip4_address_ == kMaxIPv4Address) {
+ next_ip4_address_ = kMinIPv4Address;
+ } else {
+ next_ip4_address_++;
+ }
+ if (used_ip_addresses_.find(ip) == used_ip_addresses_.end()) {
+ return ip;
+ }
+ }
+ return absl::nullopt;
+}
+
+Timestamp NetworkEmulationManagerImpl::Now() const {
+ return clock_->CurrentTime();
+}
+
+EmulatedTURNServerInterface* NetworkEmulationManagerImpl::CreateTURNServer(
+ EmulatedTURNServerConfig config) {
+ auto* client = CreateEndpoint(config.client_config);
+ auto* peer = CreateEndpoint(config.client_config);
+ char buf[128];
+ rtc::SimpleStringBuilder str(buf);
+ str.AppendFormat("turn_server_%u",
+ static_cast<unsigned>(turn_servers_.size()));
+ auto turn = std::make_unique<EmulatedTURNServer>(
+ time_controller_->CreateThread(str.str()), client, peer);
+ auto out = turn.get();
+ turn_servers_.push_back(std::move(turn));
+ return out;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/network_emulation_manager.h b/third_party/libwebrtc/test/network/network_emulation_manager.h
new file mode 100644
index 0000000000..29debca693
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation_manager.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_NETWORK_EMULATION_MANAGER_H_
+#define TEST_NETWORK_NETWORK_EMULATION_MANAGER_H_
+
#include <cstdint>
#include <functional>
#include <list>
#include <map>
#include <memory>
#include <set>
#include <utility>
#include <vector>
+
+#include "api/array_view.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/simulated_network.h"
+#include "api/test/time_controller.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "system_wrappers/include/clock.h"
+#include "test/network/cross_traffic.h"
+#include "test/network/emulated_network_manager.h"
+#include "test/network/emulated_turn_server.h"
+#include "test/network/network_emulation.h"
+
+namespace webrtc {
+namespace test {
+
// Implementation of NetworkEmulationManager. Owns every emulated object it
// hands out (nodes, endpoints, routes, cross-traffic, TURN servers); all
// returned raw pointers remain valid for the lifetime of this manager.
class NetworkEmulationManagerImpl : public NetworkEmulationManager {
 public:
  NetworkEmulationManagerImpl(
      TimeMode mode,
      EmulatedNetworkStatsGatheringMode stats_gathering_mode);
  ~NetworkEmulationManagerImpl();

  EmulatedNetworkNode* CreateEmulatedNode(BuiltInNetworkBehaviorConfig config,
                                          uint64_t random_seed = 1) override;
  EmulatedNetworkNode* CreateEmulatedNode(
      std::unique_ptr<NetworkBehaviorInterface> network_behavior) override;

  SimulatedNetworkNode::Builder NodeBuilder() override;

  EmulatedEndpointImpl* CreateEndpoint(EmulatedEndpointConfig config) override;
  void EnableEndpoint(EmulatedEndpoint* endpoint) override;
  void DisableEndpoint(EmulatedEndpoint* endpoint) override;

  EmulatedRoute* CreateRoute(EmulatedEndpoint* from,
                             const std::vector<EmulatedNetworkNode*>& via_nodes,
                             EmulatedEndpoint* to) override;

  EmulatedRoute* CreateRoute(
      const std::vector<EmulatedNetworkNode*>& via_nodes) override;

  EmulatedRoute* CreateDefaultRoute(
      EmulatedEndpoint* from,
      const std::vector<EmulatedNetworkNode*>& via_nodes,
      EmulatedEndpoint* to) override;

  void ClearRoute(EmulatedRoute* route) override;

  TcpMessageRoute* CreateTcpRoute(EmulatedRoute* send_route,
                                  EmulatedRoute* ret_route) override;

  CrossTrafficRoute* CreateCrossTrafficRoute(
      const std::vector<EmulatedNetworkNode*>& via_nodes) override;

  CrossTrafficGenerator* StartCrossTraffic(
      std::unique_ptr<CrossTrafficGenerator> generator) override;
  void StopCrossTraffic(CrossTrafficGenerator* generator) override;

  EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface(
      const std::vector<EmulatedEndpoint*>& endpoints) override;

  // Stats are aggregated asynchronously on the internal task queue;
  // `stats_callback` is invoked from that queue.
  void GetStats(
      rtc::ArrayView<EmulatedEndpoint* const> endpoints,
      std::function<void(EmulatedNetworkStats)> stats_callback) override;

  void GetStats(
      rtc::ArrayView<EmulatedNetworkNode* const> nodes,
      std::function<void(EmulatedNetworkNodeStats)> stats_callback) override;

  TimeController* time_controller() override { return time_controller_.get(); }

  TimeMode time_mode() const override { return time_mode_; }

  // Current time according to the (possibly simulated) clock.
  Timestamp Now() const;

  EmulatedTURNServerInterface* CreateTURNServer(
      EmulatedTURNServerConfig config) override;

 private:
  using CrossTrafficSource =
      std::pair<std::unique_ptr<CrossTrafficGenerator>, RepeatingTaskHandle>;

  // Returns the next unused generated IPv4 address, or nullopt when the
  // address pool is exhausted.
  absl::optional<rtc::IPAddress> GetNextIPv4Address();

  const TimeMode time_mode_;
  const EmulatedNetworkStatsGatheringMode stats_gathering_mode_;
  const std::unique_ptr<TimeController> time_controller_;
  Clock* const clock_;
  int next_node_id_;

  RepeatingTaskHandle process_task_handle_;

  // Cursor into the generated-IPv4 pool; wraps around when it reaches the end.
  uint32_t next_ip4_address_;
  std::set<rtc::IPAddress> used_ip_addresses_;

  // All objects can be added to the manager only when it is idle.
  std::vector<std::unique_ptr<EmulatedEndpoint>> endpoints_;
  std::vector<std::unique_ptr<EmulatedNetworkNode>> network_nodes_;
  std::vector<std::unique_ptr<EmulatedRoute>> routes_;
  std::vector<std::unique_ptr<CrossTrafficRoute>> traffic_routes_;
  std::vector<CrossTrafficSource> cross_traffics_;
  std::list<std::unique_ptr<TcpMessageRouteImpl>> tcp_message_routes_;
  std::vector<std::unique_ptr<EndpointsContainer>> endpoints_containers_;
  std::vector<std::unique_ptr<EmulatedNetworkManager>> network_managers_;
  std::vector<std::unique_ptr<EmulatedTURNServer>> turn_servers_;

  std::map<EmulatedEndpoint*, EmulatedNetworkManager*>
      endpoint_to_network_manager_;

  // Must be the last field, so it will be deleted first, because tasks
  // in the TaskQueue can access other fields of the instance of this class.
  TaskQueueForTest task_queue_;
};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_NETWORK_EMULATION_MANAGER_H_
diff --git a/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc b/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc
new file mode 100644
index 0000000000..51a45a8234
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation_pc_unittest.cc
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdint>
+#include <memory>
+
+#include "api/call/call_factory_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/scoped_refptr.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/transport/field_trial_based_config.h"
+#include "call/simulated_network.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "p2p/base/basic_packet_socket_factory.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/peer_connection_wrapper.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/network/network_emulation.h"
+#include "test/network/network_emulation_manager.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
// Timeout used by the ASSERT_TRUE_WAIT checks below.
constexpr int kDefaultTimeoutMs = 1000;
// Peak amplitude for the pulsed-noise fake audio capturer. NOTE(review):
// "Aptitude" looks like a typo for "Amplitude"; kept as-is because the name
// is referenced elsewhere in this file.
constexpr int kMaxAptitude = 32000;
// Sample rate (Hz) for both the fake capturer and renderer.
constexpr int kSamplingFrequency = 48000;
constexpr char kSignalThreadName[] = "signaling_thread";
+
+bool AddIceCandidates(PeerConnectionWrapper* peer,
+ std::vector<const IceCandidateInterface*> candidates) {
+ bool success = true;
+ for (const auto candidate : candidates) {
+ if (!peer->pc()->AddIceCandidate(candidate)) {
+ success = false;
+ }
+ }
+ return success;
+}
+
// Builds a fully wired PeerConnectionFactory whose audio device is fake
// (pulsed-noise capturer + discarding renderer), so no real audio hardware
// is touched. `signaling_thread` and `network_thread` are borrowed and must
// outlive the returned factory.
rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
    rtc::Thread* signaling_thread,
    rtc::Thread* network_thread) {
  PeerConnectionFactoryDependencies pcf_deps;
  pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory();
  pcf_deps.call_factory = CreateCallFactory();
  pcf_deps.event_log_factory =
      std::make_unique<RtcEventLogFactory>(pcf_deps.task_queue_factory.get());
  pcf_deps.network_thread = network_thread;
  pcf_deps.signaling_thread = signaling_thread;
  pcf_deps.trials = std::make_unique<FieldTrialBasedConfig>();
  cricket::MediaEngineDependencies media_deps;
  // The media engine shares the factory's task queue factory and trials, so
  // these raw pointers must be set before pcf_deps members are moved from.
  media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
  media_deps.adm = TestAudioDeviceModule::Create(
      media_deps.task_queue_factory,
      TestAudioDeviceModule::CreatePulsedNoiseCapturer(kMaxAptitude,
                                                       kSamplingFrequency),
      TestAudioDeviceModule::CreateDiscardRenderer(kSamplingFrequency),
      /*speed=*/1.f);
  media_deps.trials = pcf_deps.trials.get();
  SetMediaEngineDefaults(&media_deps);
  pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
  return CreateModularPeerConnectionFactory(std::move(pcf_deps));
}
+
+rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& pcf,
+ PeerConnectionObserver* observer,
+ rtc::PacketSocketFactory* packet_socket_factory,
+ rtc::NetworkManager* network_manager,
+ EmulatedTURNServerInterface* turn_server = nullptr) {
+ PeerConnectionDependencies pc_deps(observer);
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ network_manager, packet_socket_factory);
+
+ // This test does not support TCP
+ int flags = cricket::PORTALLOCATOR_DISABLE_TCP;
+ port_allocator->set_flags(port_allocator->flags() | flags);
+
+ pc_deps.allocator = std::move(port_allocator);
+ PeerConnectionInterface::RTCConfiguration rtc_configuration;
+ rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan;
+ if (turn_server != nullptr) {
+ webrtc::PeerConnectionInterface::IceServer server;
+ server.username = turn_server->GetIceServerConfig().username;
+ server.password = turn_server->GetIceServerConfig().username;
+ server.urls.push_back(turn_server->GetIceServerConfig().url);
+ rtc_configuration.servers.push_back(server);
+ }
+
+ auto result =
+ pcf->CreatePeerConnectionOrError(rtc_configuration, std::move(pc_deps));
+ if (!result.ok()) {
+ return nullptr;
+ }
+ return result.MoveValue();
+}
+
+} // namespace
+
// End-to-end audio call between two peers over an emulated network with
// default (built-in) behavior: verifies SDP negotiation, ICE gathering,
// candidate exchange, and ICE/DTLS connection establishment.
TEST(NetworkEmulationManagerPCTest, Run) {
  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
  signaling_thread->SetName(kSignalThreadName, nullptr);
  signaling_thread->Start();

  // Setup emulated network
  NetworkEmulationManagerImpl emulation(
      TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);

  EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedNetworkNode* bob_node = emulation.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedEndpoint* alice_endpoint =
      emulation.CreateEndpoint(EmulatedEndpointConfig());
  EmulatedEndpoint* bob_endpoint =
      emulation.CreateEndpoint(EmulatedEndpointConfig());
  // Bidirectional connectivity: one route per direction.
  emulation.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint);
  emulation.CreateRoute(bob_endpoint, {bob_node}, alice_endpoint);

  EmulatedNetworkManagerInterface* alice_network =
      emulation.CreateEmulatedNetworkManagerInterface({alice_endpoint});
  EmulatedNetworkManagerInterface* bob_network =
      emulation.CreateEmulatedNetworkManagerInterface({bob_endpoint});

  // Setup peer connections.
  rtc::scoped_refptr<PeerConnectionFactoryInterface> alice_pcf;
  rtc::scoped_refptr<PeerConnectionInterface> alice_pc;
  std::unique_ptr<MockPeerConnectionObserver> alice_observer =
      std::make_unique<MockPeerConnectionObserver>();

  rtc::scoped_refptr<PeerConnectionFactoryInterface> bob_pcf;
  rtc::scoped_refptr<PeerConnectionInterface> bob_pc;
  std::unique_ptr<MockPeerConnectionObserver> bob_observer =
      std::make_unique<MockPeerConnectionObserver>();

  // PeerConnection objects must be created on the signaling thread.
  SendTask(signaling_thread.get(), [&]() {
    alice_pcf = CreatePeerConnectionFactory(signaling_thread.get(),
                                            alice_network->network_thread());
    alice_pc = CreatePeerConnection(alice_pcf, alice_observer.get(),
                                    alice_network->packet_socket_factory(),
                                    alice_network->network_manager());

    bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(),
                                          bob_network->network_thread());
    bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get(),
                                  bob_network->packet_socket_factory(),
                                  bob_network->network_manager());
  });

  std::unique_ptr<PeerConnectionWrapper> alice =
      std::make_unique<PeerConnectionWrapper>(alice_pcf, alice_pc,
                                              std::move(alice_observer));
  std::unique_ptr<PeerConnectionWrapper> bob =
      std::make_unique<PeerConnectionWrapper>(bob_pcf, bob_pc,
                                              std::move(bob_observer));

  SendTask(signaling_thread.get(), [&]() {
    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
        alice_pcf->CreateAudioSource(cricket::AudioOptions());
    rtc::scoped_refptr<AudioTrackInterface> track =
        alice_pcf->CreateAudioTrack("audio", source.get());
    alice->AddTransceiver(track);

    // Connect peers.
    ASSERT_TRUE(alice->ExchangeOfferAnswerWith(bob.get()));
    // Do the SDP negotiation, and also exchange ice candidates.
    ASSERT_TRUE_WAIT(
        alice->signaling_state() == PeerConnectionInterface::kStable,
        kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(alice->IsIceGatheringDone(), kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(bob->IsIceGatheringDone(), kDefaultTimeoutMs);

    // Connect an ICE candidate pairs.
    ASSERT_TRUE(
        AddIceCandidates(bob.get(), alice->observer()->GetAllCandidates()));
    ASSERT_TRUE(
        AddIceCandidates(alice.get(), bob->observer()->GetAllCandidates()));
    // This means that ICE and DTLS are connected.
    ASSERT_TRUE_WAIT(bob->IsIceConnected(), kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(alice->IsIceConnected(), kDefaultTimeoutMs);

    // Close peer connections
    alice->pc()->Close();
    bob->pc()->Close();

    // Delete peers.
    alice.reset();
    bob.reset();
  });
}
+
// Same end-to-end call as the Run test above, but each peer reaches the other
// only through its own emulated TURN server; the two TURN servers' peer-side
// endpoints are connected via a shared relay node.
TEST(NetworkEmulationManagerPCTest, RunTURN) {
  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
  signaling_thread->SetName(kSignalThreadName, nullptr);
  signaling_thread->Start();

  // Setup emulated network
  NetworkEmulationManagerImpl emulation(
      TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);

  EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedNetworkNode* bob_node = emulation.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedNetworkNode* turn_node = emulation.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedEndpoint* alice_endpoint =
      emulation.CreateEndpoint(EmulatedEndpointConfig());
  EmulatedEndpoint* bob_endpoint =
      emulation.CreateEndpoint(EmulatedEndpointConfig());
  EmulatedTURNServerInterface* alice_turn =
      emulation.CreateTURNServer(EmulatedTURNServerConfig());
  EmulatedTURNServerInterface* bob_turn =
      emulation.CreateTURNServer(EmulatedTURNServerConfig());

  // Each peer can reach only its own TURN server's client endpoint...
  emulation.CreateRoute(alice_endpoint, {alice_node},
                        alice_turn->GetClientEndpoint());
  emulation.CreateRoute(alice_turn->GetClientEndpoint(), {alice_node},
                        alice_endpoint);

  emulation.CreateRoute(bob_endpoint, {bob_node},
                        bob_turn->GetClientEndpoint());
  emulation.CreateRoute(bob_turn->GetClientEndpoint(), {bob_node},
                        bob_endpoint);

  // ...and the TURN servers' peer-side endpoints talk to each other through
  // the shared relay node, so all media must be relayed.
  emulation.CreateRoute(alice_turn->GetPeerEndpoint(), {turn_node},
                        bob_turn->GetPeerEndpoint());
  emulation.CreateRoute(bob_turn->GetPeerEndpoint(), {turn_node},
                        alice_turn->GetPeerEndpoint());

  EmulatedNetworkManagerInterface* alice_network =
      emulation.CreateEmulatedNetworkManagerInterface({alice_endpoint});
  EmulatedNetworkManagerInterface* bob_network =
      emulation.CreateEmulatedNetworkManagerInterface({bob_endpoint});

  // Setup peer connections.
  rtc::scoped_refptr<PeerConnectionFactoryInterface> alice_pcf;
  rtc::scoped_refptr<PeerConnectionInterface> alice_pc;
  std::unique_ptr<MockPeerConnectionObserver> alice_observer =
      std::make_unique<MockPeerConnectionObserver>();

  rtc::scoped_refptr<PeerConnectionFactoryInterface> bob_pcf;
  rtc::scoped_refptr<PeerConnectionInterface> bob_pc;
  std::unique_ptr<MockPeerConnectionObserver> bob_observer =
      std::make_unique<MockPeerConnectionObserver>();

  // PeerConnection objects must be created on the signaling thread.
  SendTask(signaling_thread.get(), [&]() {
    alice_pcf = CreatePeerConnectionFactory(signaling_thread.get(),
                                            alice_network->network_thread());
    alice_pc = CreatePeerConnection(
        alice_pcf, alice_observer.get(), alice_network->packet_socket_factory(),
        alice_network->network_manager(), alice_turn);

    bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(),
                                          bob_network->network_thread());
    bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get(),
                                  bob_network->packet_socket_factory(),
                                  bob_network->network_manager(), bob_turn);
  });

  std::unique_ptr<PeerConnectionWrapper> alice =
      std::make_unique<PeerConnectionWrapper>(alice_pcf, alice_pc,
                                              std::move(alice_observer));
  std::unique_ptr<PeerConnectionWrapper> bob =
      std::make_unique<PeerConnectionWrapper>(bob_pcf, bob_pc,
                                              std::move(bob_observer));

  SendTask(signaling_thread.get(), [&]() {
    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
        alice_pcf->CreateAudioSource(cricket::AudioOptions());
    rtc::scoped_refptr<AudioTrackInterface> track =
        alice_pcf->CreateAudioTrack("audio", source.get());
    alice->AddTransceiver(track);

    // Connect peers.
    ASSERT_TRUE(alice->ExchangeOfferAnswerWith(bob.get()));
    // Do the SDP negotiation, and also exchange ice candidates.
    ASSERT_TRUE_WAIT(
        alice->signaling_state() == PeerConnectionInterface::kStable,
        kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(alice->IsIceGatheringDone(), kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(bob->IsIceGatheringDone(), kDefaultTimeoutMs);

    // Connect an ICE candidate pairs.
    ASSERT_TRUE(
        AddIceCandidates(bob.get(), alice->observer()->GetAllCandidates()));
    ASSERT_TRUE(
        AddIceCandidates(alice.get(), bob->observer()->GetAllCandidates()));
    // This means that ICE and DTLS are connected.
    ASSERT_TRUE_WAIT(bob->IsIceConnected(), kDefaultTimeoutMs);
    ASSERT_TRUE_WAIT(alice->IsIceConnected(), kDefaultTimeoutMs);

    // Close peer connections
    alice->pc()->Close();
    bob->pc()->Close();

    // Delete peers.
    alice.reset();
    bob.reset();
  });
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/network_emulation_unittest.cc b/third_party/libwebrtc/test/network/network_emulation_unittest.cc
new file mode 100644
index 0000000000..2e67a5a00a
--- /dev/null
+++ b/third_party/libwebrtc/test/network/network_emulation_unittest.cc
@@ -0,0 +1,676 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/network_emulation.h"
+
+#include <atomic>
+#include <memory>
+#include <set>
+
+#include "api/test/simulated_network.h"
+#include "api/units/time_delta.h"
+#include "call/simulated_network.h"
+#include "rtc_base/event.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/network/network_emulation_manager.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+using ::testing::ElementsAreArray;
+
// Time to advance the clock so queued packets traverse the emulated network.
constexpr TimeDelta kNetworkPacketWaitTimeout = TimeDelta::Millis(100);
// Maximum time to wait for asynchronous stats callbacks to fire.
constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1);
// IPv4 header (20 bytes) + UDP header (8 bytes), added per packet by the
// emulation layer when counting bytes on the wire.
constexpr int kOverheadIpv4Udp = 20 + 8;
+
+class SocketReader : public sigslot::has_slots<> {
+ public:
+ explicit SocketReader(rtc::Socket* socket, rtc::Thread* network_thread)
+ : socket_(socket), network_thread_(network_thread) {
+ socket_->SignalReadEvent.connect(this, &SocketReader::OnReadEvent);
+ size_ = 128 * 1024;
+ buf_ = new char[size_];
+ }
+ ~SocketReader() override { delete[] buf_; }
+
+ void OnReadEvent(rtc::Socket* socket) {
+ RTC_DCHECK(socket_ == socket);
+ RTC_DCHECK(network_thread_->IsCurrent());
+ int64_t timestamp;
+ len_ = socket_->Recv(buf_, size_, &timestamp);
+
+ MutexLock lock(&lock_);
+ received_count_++;
+ }
+
+ int ReceivedCount() {
+ MutexLock lock(&lock_);
+ return received_count_;
+ }
+
+ private:
+ rtc::Socket* const socket_;
+ rtc::Thread* const network_thread_;
+ char* buf_;
+ size_t size_;
+ int len_;
+
+ Mutex lock_;
+ int received_count_ RTC_GUARDED_BY(lock_) = 0;
+};
+
// gMock receiver bound to an endpoint port to verify packet delivery.
class MockReceiver : public EmulatedNetworkReceiverInterface {
 public:
  MOCK_METHOD(void, OnPacketReceived, (EmulatedIpPacket packet), (override));
};
+
// Fixture with three endpoints (e1, e2, e3). A test supplies the routing
// topology via SetupRouting() and then calls SendPacketsAndValidateDelivery()
// to verify that packets flow e1<->e2 and e1<->e3.
class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test {
 public:
  NetworkEmulationManagerThreeNodesRoutingTest() {
    e1_ = emulation_.CreateEndpoint(EmulatedEndpointConfig());
    e2_ = emulation_.CreateEndpoint(EmulatedEndpointConfig());
    e3_ = emulation_.CreateEndpoint(EmulatedEndpointConfig());
  }

  // Hands the three endpoints and the manager to `create_routing_func`, which
  // is expected to create the routes under test.
  void SetupRouting(
      std::function<void(EmulatedEndpoint*,
                         EmulatedEndpoint*,
                         EmulatedEndpoint*,
                         NetworkEmulationManager*)> create_routing_func) {
    create_routing_func(e1_, e2_, e3_, &emulation_);
  }

  // Sends one packet in each direction of each pair (e1<->e2, e1<->e3) and
  // expects every bound receiver to see exactly one packet.
  void SendPacketsAndValidateDelivery() {
    EXPECT_CALL(r_e1_e2_, OnPacketReceived(::testing::_)).Times(1);
    EXPECT_CALL(r_e2_e1_, OnPacketReceived(::testing::_)).Times(1);
    EXPECT_CALL(r_e1_e3_, OnPacketReceived(::testing::_)).Times(1);
    EXPECT_CALL(r_e3_e1_, OnPacketReceived(::testing::_)).Times(1);

    uint16_t common_send_port = 80;
    // BindReceiver(0, ...) lets the endpoint pick a free port.
    uint16_t r_e1_e2_port = e2_->BindReceiver(0, &r_e1_e2_).value();
    uint16_t r_e2_e1_port = e1_->BindReceiver(0, &r_e2_e1_).value();
    uint16_t r_e1_e3_port = e3_->BindReceiver(0, &r_e1_e3_).value();
    uint16_t r_e3_e1_port = e1_->BindReceiver(0, &r_e3_e1_).value();

    // Next code is using API of EmulatedEndpoint, that is visible only for
    // internals of network emulation layer. Don't use this API in other tests.
    // Send packet from e1 to e2.
    e1_->SendPacket(
        rtc::SocketAddress(e1_->GetPeerLocalAddress(), common_send_port),
        rtc::SocketAddress(e2_->GetPeerLocalAddress(), r_e1_e2_port),
        rtc::CopyOnWriteBuffer(10));

    // Send packet from e2 to e1.
    e2_->SendPacket(
        rtc::SocketAddress(e2_->GetPeerLocalAddress(), common_send_port),
        rtc::SocketAddress(e1_->GetPeerLocalAddress(), r_e2_e1_port),
        rtc::CopyOnWriteBuffer(10));

    // Send packet from e1 to e3.
    e1_->SendPacket(
        rtc::SocketAddress(e1_->GetPeerLocalAddress(), common_send_port),
        rtc::SocketAddress(e3_->GetPeerLocalAddress(), r_e1_e3_port),
        rtc::CopyOnWriteBuffer(10));

    // Send packet from e3 to e1.
    e3_->SendPacket(
        rtc::SocketAddress(e3_->GetPeerLocalAddress(), common_send_port),
        rtc::SocketAddress(e1_->GetPeerLocalAddress(), r_e3_e1_port),
        rtc::CopyOnWriteBuffer(10));

    // Sleep at the end to wait for async packets delivery.
    emulation_.time_controller()->AdvanceTime(kNetworkPacketWaitTimeout);
  }

 private:
  // Receivers: r_<source endpoint>_<destination endpoint>
  // They must be destroyed after emulation, so they should be declared before.
  MockReceiver r_e1_e2_;
  MockReceiver r_e2_e1_;
  MockReceiver r_e1_e3_;
  MockReceiver r_e3_e1_;

  NetworkEmulationManagerImpl emulation_{
      TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault};
  EmulatedEndpoint* e1_;
  EmulatedEndpoint* e2_;
  EmulatedEndpoint* e3_;
};
+
+EmulatedNetworkNode* CreateEmulatedNodeWithDefaultBuiltInConfig(
+ NetworkEmulationManager* emulation) {
+ return emulation->CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+}
+
+} // namespace
+
+using ::testing::_;
+
+TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);
+ std::set<rtc::IPAddress> ips;
+ EmulatedEndpointConfig config;
+ config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv4;
+ for (int i = 0; i < 1000; i++) {
+ EmulatedEndpoint* endpoint = network_manager.CreateEndpoint(config);
+ ASSERT_EQ(endpoint->GetPeerLocalAddress().family(), AF_INET);
+ bool result = ips.insert(endpoint->GetPeerLocalAddress()).second;
+ ASSERT_TRUE(result);
+ }
+}
+
+TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);
+ std::set<rtc::IPAddress> ips;
+ EmulatedEndpointConfig config;
+ config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv6;
+ for (int i = 0; i < 1000; i++) {
+ EmulatedEndpoint* endpoint = network_manager.CreateEndpoint(config);
+ ASSERT_EQ(endpoint->GetPeerLocalAddress().family(), AF_INET6);
+ bool result = ips.insert(endpoint->GetPeerLocalAddress()).second;
+ ASSERT_TRUE(result);
+ }
+}
+
// Sends 2x1000 UDP datagrams in each direction between two emulated
// endpoints and validates delivery counts plus the aggregated per-endpoint
// stats (totals, per-source and per-destination maps, and that debug
// counters stay empty in kDefault stats-gathering mode).
TEST(NetworkEmulationManagerTest, Run) {
  NetworkEmulationManagerImpl network_manager(
      TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);

  EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode(
      std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
  EmulatedEndpoint* alice_endpoint =
      network_manager.CreateEndpoint(EmulatedEndpointConfig());
  EmulatedEndpoint* bob_endpoint =
      network_manager.CreateEndpoint(EmulatedEndpointConfig());
  network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint);
  network_manager.CreateRoute(bob_endpoint, {bob_node}, alice_endpoint);

  EmulatedNetworkManagerInterface* nt1 =
      network_manager.CreateEmulatedNetworkManagerInterface({alice_endpoint});
  EmulatedNetworkManagerInterface* nt2 =
      network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint});

  rtc::Thread* t1 = nt1->network_thread();
  rtc::Thread* t2 = nt2->network_thread();

  // Two rounds of 1000 packets each way; sockets are recreated per round.
  rtc::CopyOnWriteBuffer data("Hello");
  for (uint64_t j = 0; j < 2; j++) {
    rtc::Socket* s1 = nullptr;
    rtc::Socket* s2 = nullptr;
    SendTask(t1, [&] {
      s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM);
    });
    SendTask(t2, [&] {
      s2 = t2->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM);
    });

    SocketReader r1(s1, t1);
    SocketReader r2(s2, t2);

    // Port 0: let each endpoint pick a free port, then read it back.
    rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0);
    rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0);

    SendTask(t1, [&] {
      s1->Bind(a1);
      a1 = s1->GetLocalAddress();
    });
    SendTask(t2, [&] {
      s2->Bind(a2);
      a2 = s2->GetLocalAddress();
    });

    SendTask(t1, [&] { s1->Connect(a2); });
    SendTask(t2, [&] { s2->Connect(a1); });

    for (uint64_t i = 0; i < 1000; i++) {
      t1->PostTask([&]() { s1->Send(data.data(), data.size()); });
      t2->PostTask([&]() { s2->Send(data.data(), data.size()); });
    }

    network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1));

    EXPECT_EQ(r1.ReceivedCount(), 1000);
    EXPECT_EQ(r2.ReceivedCount(), 1000);

    SendTask(t1, [&] { delete s1; });
    SendTask(t2, [&] { delete s2; });
  }

  const int64_t single_packet_size = data.size() + kOverheadIpv4Udp;
  std::atomic<int> received_stats_count{0};
  // Stats callbacks run asynchronously on the emulation task queue.
  nt1->GetStats([&](EmulatedNetworkStats st) {
    EXPECT_EQ(st.PacketsSent(), 2000l);
    EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l);
    EXPECT_THAT(st.local_addresses,
                ElementsAreArray({alice_endpoint->GetPeerLocalAddress()}));
    EXPECT_EQ(st.PacketsReceived(), 2000l);
    EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l);
    EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l);
    EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l);

    rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress();
    std::map<rtc::IPAddress, EmulatedNetworkIncomingStats> source_st =
        st.incoming_stats_per_source;
    ASSERT_EQ(source_st.size(), 1lu);
    EXPECT_EQ(source_st.at(bob_ip).packets_received, 2000l);
    EXPECT_EQ(source_st.at(bob_ip).bytes_received.bytes(),
              single_packet_size * 2000l);
    EXPECT_EQ(source_st.at(bob_ip).packets_discarded_no_receiver, 0l);
    EXPECT_EQ(source_st.at(bob_ip).bytes_discarded_no_receiver.bytes(), 0l);

    std::map<rtc::IPAddress, EmulatedNetworkOutgoingStats> dest_st =
        st.outgoing_stats_per_destination;
    ASSERT_EQ(dest_st.size(), 1lu);
    EXPECT_EQ(dest_st.at(bob_ip).packets_sent, 2000l);
    EXPECT_EQ(dest_st.at(bob_ip).bytes_sent.bytes(),
              single_packet_size * 2000l);

    // No debug stats are collected by default.
    EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty());
    EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty());
    EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty());
    EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty());
    EXPECT_TRUE(dest_st.at(bob_ip).sent_packets_size.IsEmpty());
    EXPECT_TRUE(source_st.at(bob_ip).received_packets_size.IsEmpty());
    EXPECT_TRUE(
        source_st.at(bob_ip).packets_discarded_no_receiver_size.IsEmpty());

    received_stats_count++;
  });
  nt2->GetStats([&](EmulatedNetworkStats st) {
    EXPECT_EQ(st.PacketsSent(), 2000l);
    EXPECT_EQ(st.BytesSent().bytes(), single_packet_size * 2000l);
    EXPECT_THAT(st.local_addresses,
                ElementsAreArray({bob_endpoint->GetPeerLocalAddress()}));
    EXPECT_EQ(st.PacketsReceived(), 2000l);
    EXPECT_EQ(st.BytesReceived().bytes(), single_packet_size * 2000l);
    EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l);
    EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l);
    EXPECT_GT(st.FirstReceivedPacketSize(), DataSize::Zero());
    EXPECT_TRUE(st.FirstPacketReceivedTime().IsFinite());
    EXPECT_TRUE(st.LastPacketReceivedTime().IsFinite());

    rtc::IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress();
    std::map<rtc::IPAddress, EmulatedNetworkIncomingStats> source_st =
        st.incoming_stats_per_source;
    ASSERT_EQ(source_st.size(), 1lu);
    EXPECT_EQ(source_st.at(alice_ip).packets_received, 2000l);
    EXPECT_EQ(source_st.at(alice_ip).bytes_received.bytes(),
              single_packet_size * 2000l);
    EXPECT_EQ(source_st.at(alice_ip).packets_discarded_no_receiver, 0l);
    EXPECT_EQ(source_st.at(alice_ip).bytes_discarded_no_receiver.bytes(), 0l);

    std::map<rtc::IPAddress, EmulatedNetworkOutgoingStats> dest_st =
        st.outgoing_stats_per_destination;
    ASSERT_EQ(dest_st.size(), 1lu);
    EXPECT_EQ(dest_st.at(alice_ip).packets_sent, 2000l);
    EXPECT_EQ(dest_st.at(alice_ip).bytes_sent.bytes(),
              single_packet_size * 2000l);

    // No debug stats are collected by default.
    EXPECT_TRUE(st.SentPacketsSizeCounter().IsEmpty());
    EXPECT_TRUE(st.sent_packets_queue_wait_time_us.IsEmpty());
    EXPECT_TRUE(st.ReceivedPacketsSizeCounter().IsEmpty());
    EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty());
    EXPECT_TRUE(dest_st.at(alice_ip).sent_packets_size.IsEmpty());
    EXPECT_TRUE(source_st.at(alice_ip).received_packets_size.IsEmpty());
    EXPECT_TRUE(
        source_st.at(alice_ip).packets_discarded_no_receiver_size.IsEmpty());

    received_stats_count++;
  });
  ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 2,
                           kStatsWaitTimeout.ms(),
                           *network_manager.time_controller());
}
+
+TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDebug);
+
+ EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+ EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+ EmulatedEndpoint* alice_endpoint =
+ network_manager.CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpoint* bob_endpoint =
+ network_manager.CreateEndpoint(EmulatedEndpointConfig());
+ network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint);
+ network_manager.CreateRoute(bob_endpoint, {bob_node}, alice_endpoint);
+
+ EmulatedNetworkManagerInterface* nt1 =
+ network_manager.CreateEmulatedNetworkManagerInterface({alice_endpoint});
+ EmulatedNetworkManagerInterface* nt2 =
+ network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint});
+
+ rtc::Thread* t1 = nt1->network_thread();
+ rtc::Thread* t2 = nt2->network_thread();
+
+ rtc::CopyOnWriteBuffer data("Hello");
+ for (uint64_t j = 0; j < 2; j++) {
+ rtc::Socket* s1 = nullptr;
+ rtc::Socket* s2 = nullptr;
+ SendTask(t1, [&] {
+ s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM);
+ });
+ SendTask(t2, [&] {
+ s2 = t2->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM);
+ });
+
+ SocketReader r1(s1, t1);
+ SocketReader r2(s2, t2);
+
+ rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0);
+ rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0);
+
+ SendTask(t1, [&] {
+ s1->Bind(a1);
+ a1 = s1->GetLocalAddress();
+ });
+ SendTask(t2, [&] {
+ s2->Bind(a2);
+ a2 = s2->GetLocalAddress();
+ });
+
+ SendTask(t1, [&] { s1->Connect(a2); });
+ SendTask(t2, [&] { s2->Connect(a1); });
+
+ for (uint64_t i = 0; i < 1000; i++) {
+ t1->PostTask([&]() { s1->Send(data.data(), data.size()); });
+ t2->PostTask([&]() { s2->Send(data.data(), data.size()); });
+ }
+
+ network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1));
+
+ EXPECT_EQ(r1.ReceivedCount(), 1000);
+ EXPECT_EQ(r2.ReceivedCount(), 1000);
+
+ SendTask(t1, [&] { delete s1; });
+ SendTask(t2, [&] { delete s2; });
+ }
+
+ const int64_t single_packet_size = data.size() + kOverheadIpv4Udp;
+ std::atomic<int> received_stats_count{0};
+ nt1->GetStats([&](EmulatedNetworkStats st) {
+ rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress();
+ std::map<rtc::IPAddress, EmulatedNetworkIncomingStats> source_st =
+ st.incoming_stats_per_source;
+ ASSERT_EQ(source_st.size(), 1lu);
+
+ std::map<rtc::IPAddress, EmulatedNetworkOutgoingStats> dest_st =
+ st.outgoing_stats_per_destination;
+ ASSERT_EQ(dest_st.size(), 1lu);
+
+    // Debug stats are collected because the manager was created in kDebug mode.
+ EXPECT_EQ(st.SentPacketsSizeCounter().NumSamples(), 2000l);
+ EXPECT_EQ(st.ReceivedPacketsSizeCounter().GetAverage(), single_packet_size);
+ EXPECT_EQ(st.sent_packets_queue_wait_time_us.NumSamples(), 2000l);
+ EXPECT_LT(st.sent_packets_queue_wait_time_us.GetMax(), 1);
+ EXPECT_TRUE(st.PacketsDiscardedNoReceiverSizeCounter().IsEmpty());
+ EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.NumSamples(), 2000l);
+ EXPECT_EQ(dest_st.at(bob_ip).sent_packets_size.GetAverage(),
+ single_packet_size);
+ EXPECT_EQ(source_st.at(bob_ip).received_packets_size.NumSamples(), 2000l);
+ EXPECT_EQ(source_st.at(bob_ip).received_packets_size.GetAverage(),
+ single_packet_size);
+ EXPECT_TRUE(
+ source_st.at(bob_ip).packets_discarded_no_receiver_size.IsEmpty());
+
+ received_stats_count++;
+ });
+ ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 1,
+ kStatsWaitTimeout.ms(),
+ *network_manager.time_controller());
+}
+
+TEST(NetworkEmulationManagerTest, ThroughputStats) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);
+
+ EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+ EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode(
+ std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+ EmulatedEndpoint* alice_endpoint =
+ network_manager.CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpoint* bob_endpoint =
+ network_manager.CreateEndpoint(EmulatedEndpointConfig());
+ network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint);
+ network_manager.CreateRoute(bob_endpoint, {bob_node}, alice_endpoint);
+
+ EmulatedNetworkManagerInterface* nt1 =
+ network_manager.CreateEmulatedNetworkManagerInterface({alice_endpoint});
+ EmulatedNetworkManagerInterface* nt2 =
+ network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint});
+
+ rtc::Thread* t1 = nt1->network_thread();
+ rtc::Thread* t2 = nt2->network_thread();
+
+ constexpr int64_t kUdpPayloadSize = 100;
+ constexpr int64_t kSinglePacketSize = kUdpPayloadSize + kOverheadIpv4Udp;
+ rtc::CopyOnWriteBuffer data(kUdpPayloadSize);
+
+ rtc::Socket* s1 = nullptr;
+ rtc::Socket* s2 = nullptr;
+ SendTask(t1,
+ [&] { s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); });
+ SendTask(t2,
+ [&] { s2 = t2->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); });
+
+ SocketReader r1(s1, t1);
+ SocketReader r2(s2, t2);
+
+ rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0);
+ rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0);
+
+ SendTask(t1, [&] {
+ s1->Bind(a1);
+ a1 = s1->GetLocalAddress();
+ });
+ SendTask(t2, [&] {
+ s2->Bind(a2);
+ a2 = s2->GetLocalAddress();
+ });
+
+ SendTask(t1, [&] { s1->Connect(a2); });
+ SendTask(t2, [&] { s2->Connect(a1); });
+
+  // Send 11 packets, totaling 1 second between the first and the last.
+ const int kNumPacketsSent = 11;
+ const TimeDelta kDelay = TimeDelta::Millis(100);
+ for (int i = 0; i < kNumPacketsSent; i++) {
+ t1->PostTask([&]() { s1->Send(data.data(), data.size()); });
+ t2->PostTask([&]() { s2->Send(data.data(), data.size()); });
+ network_manager.time_controller()->AdvanceTime(kDelay);
+ }
+
+ std::atomic<int> received_stats_count{0};
+ nt1->GetStats([&](EmulatedNetworkStats st) {
+ EXPECT_EQ(st.PacketsSent(), kNumPacketsSent);
+ EXPECT_EQ(st.BytesSent().bytes(), kSinglePacketSize * kNumPacketsSent);
+
+ const double tolerance = 0.95; // Accept 5% tolerance for timing.
+ EXPECT_GE(st.LastPacketSentTime() - st.FirstPacketSentTime(),
+ (kNumPacketsSent - 1) * kDelay * tolerance);
+ EXPECT_GT(st.AverageSendRate().bps(), 0);
+ received_stats_count++;
+ });
+
+ ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 1,
+ kStatsWaitTimeout.ms(),
+ *network_manager.time_controller());
+
+ EXPECT_EQ(r1.ReceivedCount(), 11);
+ EXPECT_EQ(r2.ReceivedCount(), 11);
+
+ SendTask(t1, [&] { delete s1; });
+ SendTask(t2, [&] { delete s2; });
+}
+
+// Testing that packets are delivered via all routes using a routing scheme as
+// follows:
+// * e1 -> n1 -> e2
+// * e2 -> n2 -> e1
+// * e1 -> n3 -> e3
+// * e3 -> n4 -> e1
+TEST_F(NetworkEmulationManagerThreeNodesRoutingTest,
+ PacketsAreDeliveredInBothWaysWhenConnectedToTwoPeers) {
+ SetupRouting([](EmulatedEndpoint* e1, EmulatedEndpoint* e2,
+ EmulatedEndpoint* e3, NetworkEmulationManager* emulation) {
+ auto* node1 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+ auto* node2 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+ auto* node3 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+ auto* node4 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+
+ emulation->CreateRoute(e1, {node1}, e2);
+ emulation->CreateRoute(e2, {node2}, e1);
+
+ emulation->CreateRoute(e1, {node3}, e3);
+ emulation->CreateRoute(e3, {node4}, e1);
+ });
+ SendPacketsAndValidateDelivery();
+}
+
+// Testing that packets are delivered via all routes using a routing scheme as
+// follows:
+// * e1 -> n1 -> e2
+// * e2 -> n2 -> e1
+// * e1 -> n1 -> e3
+// * e3 -> n4 -> e1
+TEST_F(NetworkEmulationManagerThreeNodesRoutingTest,
+ PacketsAreDeliveredInBothWaysWhenConnectedToTwoPeersOverSameSendLink) {
+ SetupRouting([](EmulatedEndpoint* e1, EmulatedEndpoint* e2,
+ EmulatedEndpoint* e3, NetworkEmulationManager* emulation) {
+ auto* node1 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+ auto* node2 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+ auto* node3 = CreateEmulatedNodeWithDefaultBuiltInConfig(emulation);
+
+ emulation->CreateRoute(e1, {node1}, e2);
+ emulation->CreateRoute(e2, {node2}, e1);
+
+ emulation->CreateRoute(e1, {node1}, e3);
+ emulation->CreateRoute(e3, {node3}, e1);
+ });
+ SendPacketsAndValidateDelivery();
+}
+
+TEST(NetworkEmulationManagerTest, EndpointLoopback) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault);
+ auto endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig());
+
+ MockReceiver receiver;
+ EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1);
+ ASSERT_EQ(endpoint->BindReceiver(80, &receiver), 80);
+
+ endpoint->SendPacket(rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80),
+ rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80),
+ "Hello");
+ network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1));
+}
+
+TEST(NetworkEmulationManagerTest, EndpointCanSendWithDifferentSourceIp) {
+ constexpr uint32_t kEndpointIp = 0xC0A80011; // 192.168.0.17
+ constexpr uint32_t kSourceIp = 0xC0A80012; // 192.168.0.18
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault);
+ EmulatedEndpointConfig endpoint_config;
+ endpoint_config.ip = rtc::IPAddress(kEndpointIp);
+ endpoint_config.allow_send_packet_with_different_source_ip = true;
+ auto endpoint = network_manager.CreateEndpoint(endpoint_config);
+
+ MockReceiver receiver;
+ EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1);
+ ASSERT_EQ(endpoint->BindReceiver(80, &receiver), 80);
+
+ endpoint->SendPacket(rtc::SocketAddress(kSourceIp, 80),
+ rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80),
+ "Hello");
+ network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1));
+}
+
+TEST(NetworkEmulationManagerTest,
+ EndpointCanReceiveWithDifferentDestIpThroughDefaultRoute) {
+ constexpr uint32_t kDestEndpointIp = 0xC0A80011; // 192.168.0.17
+ constexpr uint32_t kDestIp = 0xC0A80012; // 192.168.0.18
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault);
+ auto sender_endpoint =
+ network_manager.CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpointConfig endpoint_config;
+ endpoint_config.ip = rtc::IPAddress(kDestEndpointIp);
+ endpoint_config.allow_receive_packets_with_different_dest_ip = true;
+ auto receiver_endpoint = network_manager.CreateEndpoint(endpoint_config);
+
+ MockReceiver receiver;
+ EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1);
+ ASSERT_EQ(receiver_endpoint->BindReceiver(80, &receiver), 80);
+
+ network_manager.CreateDefaultRoute(
+ sender_endpoint, {network_manager.NodeBuilder().Build().node},
+ receiver_endpoint);
+
+ sender_endpoint->SendPacket(
+ rtc::SocketAddress(sender_endpoint->GetPeerLocalAddress(), 80),
+ rtc::SocketAddress(kDestIp, 80), "Hello");
+ network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1));
+}
+
+TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) {
+ NetworkEmulationManagerImpl network_manager(
+ TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault);
+ auto turn = network_manager.CreateTURNServer(EmulatedTURNServerConfig());
+
+ EXPECT_GT(turn->GetIceServerConfig().username.size(), 0u);
+ EXPECT_GT(turn->GetIceServerConfig().password.size(), 0u);
+ EXPECT_NE(turn->GetIceServerConfig().url.find(
+ turn->GetClientEndpoint()->GetPeerLocalAddress().ToString()),
+ std::string::npos);
+}
+
+TEST(NetworkEmulationManagerTURNTest, ClientTraffic) {
+ NetworkEmulationManagerImpl emulation(
+ TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault);
+ auto* ep = emulation.CreateEndpoint(EmulatedEndpointConfig());
+ auto* turn = emulation.CreateTURNServer(EmulatedTURNServerConfig());
+ auto* node = CreateEmulatedNodeWithDefaultBuiltInConfig(&emulation);
+ emulation.CreateRoute(ep, {node}, turn->GetClientEndpoint());
+ emulation.CreateRoute(turn->GetClientEndpoint(), {node}, ep);
+
+ MockReceiver recv;
+ int port = ep->BindReceiver(0, &recv).value();
+
+ // Construct a STUN BINDING.
+ cricket::StunMessage ping(cricket::STUN_BINDING_REQUEST);
+ rtc::ByteBufferWriter buf;
+ ping.Write(&buf);
+ rtc::CopyOnWriteBuffer packet(buf.Data(), buf.Length());
+
+ // We expect to get a ping reply.
+ EXPECT_CALL(recv, OnPacketReceived(::testing::_)).Times(1);
+
+ ep->SendPacket(rtc::SocketAddress(ep->GetPeerLocalAddress(), port),
+ turn->GetClientEndpointAddress(), packet);
+ emulation.time_controller()->AdvanceTime(TimeDelta::Seconds(1));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/traffic_route.cc b/third_party/libwebrtc/test/network/traffic_route.cc
new file mode 100644
index 0000000000..81bb8ca514
--- /dev/null
+++ b/third_party/libwebrtc/test/network/traffic_route.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/network/traffic_route.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+class NullReceiver : public EmulatedNetworkReceiverInterface {
+ public:
+ void OnPacketReceived(EmulatedIpPacket packet) override {}
+};
+
+class ActionReceiver : public EmulatedNetworkReceiverInterface {
+ public:
+ explicit ActionReceiver(std::function<void()> action) : action_(action) {}
+ ~ActionReceiver() override = default;
+
+ void OnPacketReceived(EmulatedIpPacket packet) override {
+ action_();
+ }
+
+ private:
+ std::function<void()> action_;
+};
+
+} // namespace
+
+CrossTrafficRouteImpl::CrossTrafficRouteImpl(
+ Clock* clock,
+ EmulatedNetworkReceiverInterface* receiver,
+ EmulatedEndpointImpl* endpoint)
+ : clock_(clock), receiver_(receiver), endpoint_(endpoint) {
+ null_receiver_ = std::make_unique<NullReceiver>();
+ absl::optional<uint16_t> port =
+ endpoint_->BindReceiver(0, null_receiver_.get());
+ RTC_DCHECK(port);
+ null_receiver_port_ = port.value();
+}
+CrossTrafficRouteImpl::~CrossTrafficRouteImpl() = default;
+
+void CrossTrafficRouteImpl::TriggerPacketBurst(size_t num_packets,
+ size_t packet_size) {
+ for (size_t i = 0; i < num_packets; ++i) {
+ SendPacket(packet_size);
+ }
+}
+
+void CrossTrafficRouteImpl::NetworkDelayedAction(size_t packet_size,
+ std::function<void()> action) {
+ auto action_receiver = std::make_unique<ActionReceiver>(action);
+ // BindOneShotReceiver arranges to free the port in the endpoint after the
+ // action is done.
+ absl::optional<uint16_t> port =
+ endpoint_->BindOneShotReceiver(0, action_receiver.get());
+ RTC_DCHECK(port);
+ actions_.push_back(std::move(action_receiver));
+ SendPacket(packet_size, port.value());
+}
+
+void CrossTrafficRouteImpl::SendPacket(size_t packet_size) {
+ SendPacket(packet_size, null_receiver_port_);
+}
+
+void CrossTrafficRouteImpl::SendPacket(size_t packet_size, uint16_t dest_port) {
+ rtc::CopyOnWriteBuffer data(packet_size);
+ std::fill_n(data.MutableData(), data.size(), 0);
+ receiver_->OnPacketReceived(EmulatedIpPacket(
+ /*from=*/rtc::SocketAddress(),
+ rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), dest_port), data,
+ clock_->CurrentTime()));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/network/traffic_route.h b/third_party/libwebrtc/test/network/traffic_route.h
new file mode 100644
index 0000000000..dbc41a694f
--- /dev/null
+++ b/third_party/libwebrtc/test/network/traffic_route.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_NETWORK_TRAFFIC_ROUTE_H_
+#define TEST_NETWORK_TRAFFIC_ROUTE_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/test/network_emulation_manager.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "system_wrappers/include/clock.h"
+#include "test/network/network_emulation.h"
+
+namespace webrtc {
+namespace test {
+
+// Represents the endpoint for cross traffic that is going through the network.
+// It can be used to emulate unexpected network load.
+class CrossTrafficRouteImpl final : public CrossTrafficRoute {
+ public:
+ CrossTrafficRouteImpl(Clock* clock,
+ EmulatedNetworkReceiverInterface* receiver,
+ EmulatedEndpointImpl* endpoint);
+ ~CrossTrafficRouteImpl();
+
+ // Triggers sending of dummy packets with size `packet_size` bytes.
+ void TriggerPacketBurst(size_t num_packets, size_t packet_size) override;
+ // Sends a packet over the nodes and runs `action` when it has been delivered.
+ void NetworkDelayedAction(size_t packet_size,
+ std::function<void()> action) override;
+
+ void SendPacket(size_t packet_size) override;
+
+ private:
+ void SendPacket(size_t packet_size, uint16_t dest_port);
+
+ Clock* const clock_;
+ EmulatedNetworkReceiverInterface* const receiver_;
+ EmulatedEndpointImpl* const endpoint_;
+
+ uint16_t null_receiver_port_;
+ std::unique_ptr<EmulatedNetworkReceiverInterface> null_receiver_;
+ std::vector<std::unique_ptr<EmulatedNetworkReceiverInterface>> actions_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NETWORK_TRAFFIC_ROUTE_H_
diff --git a/third_party/libwebrtc/test/null_platform_renderer.cc b/third_party/libwebrtc/test/null_platform_renderer.cc
new file mode 100644
index 0000000000..7ea604ead8
--- /dev/null
+++ b/third_party/libwebrtc/test/null_platform_renderer.cc
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+VideoRenderer* VideoRenderer::CreatePlatformRenderer(const char* window_title,
+ size_t width,
+ size_t height) {
+ return NULL;
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/null_transport.cc b/third_party/libwebrtc/test/null_transport.cc
new file mode 100644
index 0000000000..efbd9499d8
--- /dev/null
+++ b/third_party/libwebrtc/test/null_transport.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/null_transport.h"
+
+namespace webrtc {
+namespace test {
+
+bool NullTransport::SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) {
+ return true;
+}
+
+bool NullTransport::SendRtcp(const uint8_t* packet, size_t length) {
+ return true;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/null_transport.h b/third_party/libwebrtc/test/null_transport.h
new file mode 100644
index 0000000000..f264e7b45a
--- /dev/null
+++ b/third_party/libwebrtc/test/null_transport.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_NULL_TRANSPORT_H_
+#define TEST_NULL_TRANSPORT_H_
+
+#include "api/call/transport.h"
+
+namespace webrtc {
+
+class PacketReceiver;
+
+namespace test {
+class NullTransport : public Transport {
+ public:
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) override;
+ bool SendRtcp(const uint8_t* packet, size_t length) override;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_NULL_TRANSPORT_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/BUILD.gn b/third_party/libwebrtc/test/pc/e2e/BUILD.gn
new file mode 100644
index 0000000000..7354aa8ba4
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/BUILD.gn
@@ -0,0 +1,573 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("metric_metadata_keys") {
+ testonly = true
+ sources = [ "metric_metadata_keys.h" ]
+}
+
+if (!build_with_chromium) {
+ group("e2e") {
+ testonly = true
+
+ deps = [ ":metric_metadata_keys" ]
+ if (rtc_include_tests) {
+ deps += [
+ ":peerconnection_quality_test",
+ ":test_peer",
+ ]
+ }
+ }
+
+ if (rtc_include_tests) {
+ group("e2e_unittests") {
+ testonly = true
+
+ deps = [
+ ":peer_connection_e2e_smoke_test",
+ ":peer_connection_quality_test_metric_names_test",
+ ":peer_connection_quality_test_test",
+ ":stats_based_network_quality_metrics_reporter_test",
+ ":stats_poller_test",
+ ]
+ }
+ }
+
+ if (rtc_include_tests) {
+ rtc_library("echo_emulation") {
+ testonly = true
+ sources = [
+ "echo/echo_emulation.cc",
+ "echo/echo_emulation.h",
+ ]
+ deps = [
+ "../../../api/test/pclf:media_configuration",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../rtc_base:swap_queue",
+ ]
+ }
+
+ rtc_library("test_peer") {
+ testonly = true
+ sources = [
+ "test_peer.cc",
+ "test_peer.h",
+ ]
+ deps = [
+ ":stats_provider",
+ "../../../api:frame_generator_api",
+ "../../../api:function_view",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:scoped_refptr",
+ "../../../api:sequence_checker",
+ "../../../api/task_queue:pending_task_safety_flag",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../modules/audio_processing:api",
+ "../../../pc:peerconnection_wrapper",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:refcount",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:variant",
+ ]
+ }
+
+ rtc_library("test_peer_factory") {
+ testonly = true
+ sources = [
+ "test_peer_factory.cc",
+ "test_peer_factory.h",
+ ]
+ deps = [
+ ":echo_emulation",
+ ":test_peer",
+ "../..:copy_to_file_audio_capturer",
+ "../../../api:create_time_controller",
+ "../../../api:time_controller",
+ "../../../api/rtc_event_log:rtc_event_log_factory",
+ "../../../api/task_queue:default_task_queue_factory",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/transport:field_trial_based_config",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../media:rtc_audio_video",
+ "../../../media:rtc_media_engine_defaults",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../modules/audio_processing/aec_dump",
+ "../../../p2p:rtc_p2p",
+ "../../../rtc_base:rtc_task_queue",
+ "../../../rtc_base:threading",
+ "analyzer/video:quality_analyzing_video_encoder",
+ "analyzer/video:video_quality_analyzer_injection_helper",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+ }
+
+ rtc_library("media_helper") {
+ testonly = true
+ sources = [
+ "media/media_helper.cc",
+ "media/media_helper.h",
+ "media/test_video_capturer_video_track_source.h",
+ ]
+ deps = [
+ ":test_peer",
+ "../..:fileutils",
+ "../..:platform_video_capturer",
+ "../..:video_test_common",
+ "../../../api:create_frame_generator",
+ "../../../api:frame_generator_api",
+ "../../../api:media_stream_interface",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/video:video_frame",
+ "../../../pc:session_description",
+ "../../../pc:video_track_source",
+ "analyzer/video:video_quality_analyzer_injection_helper",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ]
+ }
+
+ rtc_library("peer_params_preprocessor") {
+ visibility = [ "*" ]
+ testonly = true
+ sources = [
+ "peer_params_preprocessor.cc",
+ "peer_params_preprocessor.h",
+ ]
+ deps = [
+ "../..:fileutils",
+ "../../../api:peer_network_dependencies",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../modules/video_coding/svc:scalability_mode_util",
+ "../../../modules/video_coding/svc:scalability_structures",
+ "../../../rtc_base:macromagic",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("test_activities_executor") {
+ testonly = true
+ sources = [
+ "test_activities_executor.cc",
+ "test_activities_executor.h",
+ ]
+ deps = [
+ "../../../api/task_queue",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../rtc_base/task_utils:repeating_task",
+ "../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("peerconnection_quality_test") {
+ testonly = true
+
+ sources = [
+ "peer_connection_quality_test.cc",
+ "peer_connection_quality_test.h",
+ ]
+ deps = [
+ ":analyzer_helper",
+ ":cross_media_metrics_reporter",
+ ":default_audio_quality_analyzer",
+ ":media_helper",
+ ":metric_metadata_keys",
+ ":peer_params_preprocessor",
+ ":sdp_changer",
+ ":stats_poller",
+ ":test_activities_executor",
+ ":test_peer",
+ ":test_peer_factory",
+ "../..:field_trial",
+ "../..:fileutils",
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_event_log_output_file",
+ "../../../api:scoped_refptr",
+ "../../../api:time_controller",
+ "../../../api:video_quality_analyzer_api",
+ "../../../api/rtc_event_log",
+ "../../../api/task_queue",
+ "../../../api/test/metrics:metric",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../pc:pc_test_utils",
+ "../../../pc:sdp_utils",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base:safe_conversions",
+ "../../../rtc_base:stringutils",
+ "../../../rtc_base:task_queue_for_test",
+ "../../../rtc_base:threading",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers",
+ "../../../system_wrappers:field_trial",
+ "analyzer/video:default_video_quality_analyzer",
+ "analyzer/video:single_process_encoded_image_data_injector",
+ "analyzer/video:video_frame_tracking_id_injector",
+ "analyzer/video:video_quality_analyzer_injection_helper",
+ "analyzer/video:video_quality_metrics_reporter",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ peer_connection_e2e_smoke_test_resources = [
+ "../../../resources/pc_quality_smoke_test_alice_source.wav",
+ "../../../resources/pc_quality_smoke_test_bob_source.wav",
+ ]
+ if (is_ios) {
+ bundle_data("peer_connection_e2e_smoke_test_resources_bundle_data") {
+ testonly = true
+ sources = peer_connection_e2e_smoke_test_resources
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+ }
+
+ rtc_library("peer_connection_e2e_smoke_test") {
+ testonly = true
+
+ sources = [ "peer_connection_e2e_smoke_test.cc" ]
+ deps = [
+ ":default_audio_quality_analyzer",
+ ":network_quality_metrics_reporter",
+ ":stats_based_network_quality_metrics_reporter",
+ "../../../api:callfactory_api",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:create_peerconnection_quality_test_fixture",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:media_stream_interface",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:scoped_refptr",
+ "../../../api:simulated_network_api",
+ "../../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../../api/audio_codecs:builtin_audio_encoder_factory",
+ "../../../api/test/metrics:global_metrics_logger_and_exporter",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/video_codecs:builtin_video_decoder_factory",
+ "../../../api/video_codecs:builtin_video_encoder_factory",
+ "../../../call:simulated_network",
+ "../../../media:rtc_audio_video",
+ "../../../modules/audio_device:audio_device_impl",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:pc_test_utils",
+ "../../../pc:peerconnection_wrapper",
+ "../../../rtc_base:gunit_helpers",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_event",
+ "../../../system_wrappers:field_trial",
+ "../../../test:field_trial",
+ "../../../test:fileutils",
+ "../../../test:test_support",
+ "analyzer/video:default_video_quality_analyzer",
+ "analyzer/video:default_video_quality_analyzer_shared",
+ ]
+ data = peer_connection_e2e_smoke_test_resources
+ if (is_mac || is_ios) {
+ deps += [ "../../../modules/video_coding:objc_codec_factory_helper" ]
+ }
+ if (is_ios) {
+ deps += [ ":peer_connection_e2e_smoke_test_resources_bundle_data" ]
+ }
+ }
+
+ rtc_library("peer_connection_quality_test_metric_names_test") {
+ testonly = true
+ sources = [ "peer_connection_quality_test_metric_names_test.cc" ]
+ deps = [
+ ":metric_metadata_keys",
+ ":peerconnection_quality_test",
+ ":stats_based_network_quality_metrics_reporter",
+ "../..:test_support",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/test/metrics:stdout_metrics_exporter",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/units:time_delta",
+ ]
+ }
+
+ rtc_library("stats_based_network_quality_metrics_reporter_test") {
+ testonly = true
+ sources = [ "stats_based_network_quality_metrics_reporter_test.cc" ]
+ deps = [
+ ":metric_metadata_keys",
+ ":peerconnection_quality_test",
+ ":stats_based_network_quality_metrics_reporter",
+ "../..:test_support",
+ "../../../api:array_view",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:create_peer_connection_quality_test_frame_generator",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/test/metrics:stdout_metrics_exporter",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/units:time_delta",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("peer_connection_quality_test_test") {
+ testonly = true
+ sources = [ "peer_connection_quality_test_test.cc" ]
+ deps = [
+ ":peerconnection_quality_test",
+ "../..:fileutils",
+ "../..:test_support",
+ "../..:video_test_support",
+ "../../../api:create_network_emulation_manager",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api/test/metrics:global_metrics_logger_and_exporter",
+ "../../../api/test/pclf:media_configuration",
+ "../../../api/test/pclf:media_quality_test_params",
+ "../../../api/test/pclf:peer_configurer",
+ "../../../api/units:time_delta",
+ "../../../rtc_base:timeutils",
+ ]
+ }
+
+ rtc_library("stats_provider") {
+ testonly = true
+ sources = [ "stats_provider.h" ]
+ deps = [ "../../../api:rtc_stats_api" ]
+ }
+
+ rtc_library("stats_poller") {
+ testonly = true
+ sources = [
+ "stats_poller.cc",
+ "stats_poller.h",
+ ]
+ deps = [
+ ":stats_provider",
+ ":test_peer",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:macromagic",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ }
+
+ rtc_library("stats_poller_test") {
+ testonly = true
+ sources = [ "stats_poller_test.cc" ]
+ deps = [
+ ":stats_poller",
+ "../..:test_support",
+ "../../../api:rtc_stats_api",
+ ]
+ }
+ }
+
+ rtc_library("analyzer_helper") {
+ sources = [
+ "analyzer_helper.cc",
+ "analyzer_helper.h",
+ ]
+ deps = [
+ "../../../api:sequence_checker",
+ "../../../api:track_id_stream_info_map",
+ "../../../rtc_base:macromagic",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("default_audio_quality_analyzer") {
+ testonly = true
+ sources = [
+ "analyzer/audio/default_audio_quality_analyzer.cc",
+ "analyzer/audio/default_audio_quality_analyzer.h",
+ ]
+
+ deps = [
+ ":metric_metadata_keys",
+ "../..:perf_test",
+ "../../../api:audio_quality_analyzer_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:stats_observer_interface",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/numerics",
+ "../../../api/test/metrics:metric",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/units:time_delta",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:logging",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("network_quality_metrics_reporter") {
+ testonly = true
+ sources = [
+ "network_quality_metrics_reporter.cc",
+ "network_quality_metrics_reporter.h",
+ ]
+ deps = [
+ "../..:perf_test",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/test/metrics:metric",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/units:data_size",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("stats_based_network_quality_metrics_reporter") {
+ testonly = true
+ sources = [
+ "stats_based_network_quality_metrics_reporter.cc",
+ "stats_based_network_quality_metrics_reporter.h",
+ ]
+ deps = [
+ ":metric_metadata_keys",
+ "../..:perf_test",
+ "../../../api:array_view",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:scoped_refptr",
+ "../../../api:sequence_checker",
+ "../../../api/numerics",
+ "../../../api/test/metrics:metric",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/test/network_emulation",
+ "../../../api/units:data_rate",
+ "../../../api/units:data_size",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:ip_address",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base:stringutils",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../rtc_base/system:no_unique_address",
+ "../../../system_wrappers:field_trial",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+ }
+
+ rtc_library("cross_media_metrics_reporter") {
+ testonly = true
+ sources = [
+ "cross_media_metrics_reporter.cc",
+ "cross_media_metrics_reporter.h",
+ ]
+ deps = [
+ ":metric_metadata_keys",
+ "../..:perf_test",
+ "../../../api:network_emulation_manager_api",
+ "../../../api:peer_connection_quality_test_fixture_api",
+ "../../../api:rtc_stats_api",
+ "../../../api:track_id_stream_info_map",
+ "../../../api/numerics",
+ "../../../api/test/metrics:metric",
+ "../../../api/test/metrics:metrics_logger",
+ "../../../api/units:timestamp",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:criticalsection",
+ "../../../rtc_base:rtc_event",
+ "../../../rtc_base:rtc_numerics",
+ "../../../rtc_base/synchronization:mutex",
+ "../../../system_wrappers:field_trial",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("sdp_changer") {
+ testonly = true
+ sources = [
+ "sdp/sdp_changer.cc",
+ "sdp/sdp_changer.h",
+ ]
+ deps = [
+ "../../../api:array_view",
+ "../../../api:libjingle_peerconnection_api",
+ "../../../api:rtp_parameters",
+ "../../../api/test/pclf:media_configuration",
+ "../../../media:media_constants",
+ "../../../media:rid_description",
+ "../../../media:rtc_media_base",
+ "../../../p2p:rtc_p2p",
+ "../../../pc:sdp_utils",
+ "../../../pc:session_description",
+ "../../../pc:simulcast_description",
+ "../../../rtc_base:stringutils",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
new file mode 100644
index 0000000000..98d0c533c2
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
+
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/metric.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+DefaultAudioQualityAnalyzer::DefaultAudioQualityAnalyzer(
+ test::MetricsLogger* const metrics_logger)
+ : metrics_logger_(metrics_logger) {
+ RTC_CHECK(metrics_logger_);
+}
+
+void DefaultAudioQualityAnalyzer::Start(std::string test_case_name,
+ TrackIdStreamInfoMap* analyzer_helper) {
+ test_case_name_ = std::move(test_case_name);
+ analyzer_helper_ = analyzer_helper;
+}
+
+void DefaultAudioQualityAnalyzer::OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ auto stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+
+ for (auto& stat : stats) {
+ if (!stat->kind.is_defined() ||
+ !(*stat->kind == RTCMediaStreamTrackKind::kAudio)) {
+ continue;
+ }
+
+ StatsSample sample;
+ sample.total_samples_received =
+ stat->total_samples_received.ValueOrDefault(0ul);
+ sample.concealed_samples = stat->concealed_samples.ValueOrDefault(0ul);
+ sample.removed_samples_for_acceleration =
+ stat->removed_samples_for_acceleration.ValueOrDefault(0ul);
+ sample.inserted_samples_for_deceleration =
+ stat->inserted_samples_for_deceleration.ValueOrDefault(0ul);
+ sample.silent_concealed_samples =
+ stat->silent_concealed_samples.ValueOrDefault(0ul);
+ sample.jitter_buffer_delay =
+ TimeDelta::Seconds(stat->jitter_buffer_delay.ValueOrDefault(0.));
+ sample.jitter_buffer_target_delay =
+ TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.));
+ sample.jitter_buffer_emitted_count =
+ stat->jitter_buffer_emitted_count.ValueOrDefault(0ul);
+
+ TrackIdStreamInfoMap::StreamInfo stream_info =
+ analyzer_helper_->GetStreamInfoFromTrackId(*stat->track_identifier);
+
+ MutexLock lock(&lock_);
+ stream_info_.emplace(stream_info.stream_label, stream_info);
+ StatsSample prev_sample = last_stats_sample_[stream_info.stream_label];
+ RTC_CHECK_GE(sample.total_samples_received,
+ prev_sample.total_samples_received);
+ double total_samples_diff = static_cast<double>(
+ sample.total_samples_received - prev_sample.total_samples_received);
+ if (total_samples_diff == 0) {
+ return;
+ }
+
+ AudioStreamStats& audio_stream_stats =
+ streams_stats_[stream_info.stream_label];
+ audio_stream_stats.expand_rate.AddSample(
+ (sample.concealed_samples - prev_sample.concealed_samples) /
+ total_samples_diff);
+ audio_stream_stats.accelerate_rate.AddSample(
+ (sample.removed_samples_for_acceleration -
+ prev_sample.removed_samples_for_acceleration) /
+ total_samples_diff);
+ audio_stream_stats.preemptive_rate.AddSample(
+ (sample.inserted_samples_for_deceleration -
+ prev_sample.inserted_samples_for_deceleration) /
+ total_samples_diff);
+
+ int64_t speech_concealed_samples =
+ sample.concealed_samples - sample.silent_concealed_samples;
+ int64_t prev_speech_concealed_samples =
+ prev_sample.concealed_samples - prev_sample.silent_concealed_samples;
+ audio_stream_stats.speech_expand_rate.AddSample(
+ (speech_concealed_samples - prev_speech_concealed_samples) /
+ total_samples_diff);
+
+ int64_t jitter_buffer_emitted_count_diff =
+ sample.jitter_buffer_emitted_count -
+ prev_sample.jitter_buffer_emitted_count;
+ if (jitter_buffer_emitted_count_diff > 0) {
+ TimeDelta jitter_buffer_delay_diff =
+ sample.jitter_buffer_delay - prev_sample.jitter_buffer_delay;
+ TimeDelta jitter_buffer_target_delay_diff =
+ sample.jitter_buffer_target_delay -
+ prev_sample.jitter_buffer_target_delay;
+ audio_stream_stats.average_jitter_buffer_delay_ms.AddSample(
+ jitter_buffer_delay_diff.ms<double>() /
+ jitter_buffer_emitted_count_diff);
+ audio_stream_stats.preferred_buffer_size_ms.AddSample(
+ jitter_buffer_target_delay_diff.ms<double>() /
+ jitter_buffer_emitted_count_diff);
+ }
+
+ last_stats_sample_[stream_info.stream_label] = sample;
+ }
+}
+
+std::string DefaultAudioQualityAnalyzer::GetTestCaseName(
+ const std::string& stream_label) const {
+ return test_case_name_ + "/" + stream_label;
+}
+
+void DefaultAudioQualityAnalyzer::Stop() {
+ MutexLock lock(&lock_);
+ for (auto& item : streams_stats_) {
+ const TrackIdStreamInfoMap::StreamInfo& stream_info =
+ stream_info_[item.first];
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ std::map<std::string, std::string> metric_metadata{
+ {MetricMetadataKey::kAudioStreamMetadataKey, item.first},
+ {MetricMetadataKey::kPeerMetadataKey, stream_info.receiver_peer},
+ {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+
+ metrics_logger_->LogMetric("expand_rate", GetTestCaseName(item.first),
+ item.second.expand_rate, Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric("accelerate_rate", GetTestCaseName(item.first),
+ item.second.accelerate_rate, Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric("preemptive_rate", GetTestCaseName(item.first),
+ item.second.preemptive_rate, Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric(
+ "speech_expand_rate", GetTestCaseName(item.first),
+ item.second.speech_expand_rate, Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "average_jitter_buffer_delay_ms", GetTestCaseName(item.first),
+ item.second.average_jitter_buffer_delay_ms, Unit::kMilliseconds,
+ ImprovementDirection::kNeitherIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "preferred_buffer_size_ms", GetTestCaseName(item.first),
+ item.second.preferred_buffer_size_ms, Unit::kMilliseconds,
+ ImprovementDirection::kNeitherIsBetter, metric_metadata);
+ }
+}
+
+std::map<std::string, AudioStreamStats>
+DefaultAudioQualityAnalyzer::GetAudioStreamsStats() const {
+ MutexLock lock(&lock_);
+ return streams_stats_;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
new file mode 100644
index 0000000000..9e427afed8
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_AUDIO_DEFAULT_AUDIO_QUALITY_ANALYZER_H_
+#define TEST_PC_E2E_ANALYZER_AUDIO_DEFAULT_AUDIO_QUALITY_ANALYZER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/audio_quality_analyzer_interface.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+struct AudioStreamStats {
+ SamplesStatsCounter expand_rate;
+ SamplesStatsCounter accelerate_rate;
+ SamplesStatsCounter preemptive_rate;
+ SamplesStatsCounter speech_expand_rate;
+ SamplesStatsCounter average_jitter_buffer_delay_ms;
+ SamplesStatsCounter preferred_buffer_size_ms;
+};
+
+class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface {
+ public:
+ explicit DefaultAudioQualityAnalyzer(
+ test::MetricsLogger* const metrics_logger);
+
+ void Start(std::string test_case_name,
+ TrackIdStreamInfoMap* analyzer_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+ void Stop() override;
+
+ // Returns audio quality stats per stream label.
+ std::map<std::string, AudioStreamStats> GetAudioStreamsStats() const;
+
+ private:
+ struct StatsSample {
+ uint64_t total_samples_received = 0;
+ uint64_t concealed_samples = 0;
+ uint64_t removed_samples_for_acceleration = 0;
+ uint64_t inserted_samples_for_deceleration = 0;
+ uint64_t silent_concealed_samples = 0;
+ TimeDelta jitter_buffer_delay = TimeDelta::Zero();
+ TimeDelta jitter_buffer_target_delay = TimeDelta::Zero();
+ uint64_t jitter_buffer_emitted_count = 0;
+ };
+
+ std::string GetTestCaseName(const std::string& stream_label) const;
+
+ test::MetricsLogger* const metrics_logger_;
+
+ std::string test_case_name_;
+ TrackIdStreamInfoMap* analyzer_helper_;
+
+ mutable Mutex lock_;
+ std::map<std::string, AudioStreamStats> streams_stats_ RTC_GUARDED_BY(lock_);
+ std::map<std::string, TrackIdStreamInfoMap::StreamInfo> stream_info_
+ RTC_GUARDED_BY(lock_);
+ std::map<std::string, StatsSample> last_stats_sample_ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_AUDIO_DEFAULT_AUDIO_QUALITY_ANALYZER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn
new file mode 100644
index 0000000000..cbb4c078f3
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn
@@ -0,0 +1,573 @@
+# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../../../webrtc.gni")
+
+if (!build_with_chromium) {
+ group("video_analyzer") {
+ testonly = true
+
+ deps = [
+ ":analyzing_video_sinks_helper",
+ ":default_video_quality_analyzer_internal",
+ ":encoded_image_data_injector_api",
+ ":example_video_quality_analyzer",
+ ":multi_reader_queue",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":simulcast_dummy_buffer_helper",
+ ":single_process_encoded_image_data_injector",
+ ":video_dumping",
+ ":video_frame_tracking_id_injector",
+ ":video_quality_metrics_reporter",
+ ]
+ if (rtc_include_tests) {
+ deps += [
+ ":analyzing_video_sink",
+ ":video_quality_analyzer_injection_helper",
+ ]
+ }
+ }
+
+ if (rtc_include_tests) {
+ group("video_analyzer_unittests") {
+ testonly = true
+
+ deps = [
+ ":analyzing_video_sink_test",
+ ":analyzing_video_sinks_helper_test",
+ ":default_video_quality_analyzer_frames_comparator_test",
+ ":default_video_quality_analyzer_metric_names_test",
+ ":default_video_quality_analyzer_stream_state_test",
+ ":default_video_quality_analyzer_test",
+ ":multi_reader_queue_test",
+ ":names_collection_test",
+ ":simulcast_dummy_buffer_helper_test",
+ ":single_process_encoded_image_data_injector_unittest",
+ ":video_dumping_test",
+ ":video_frame_tracking_id_injector_unittest",
+ ]
+ }
+ }
+}
+
+rtc_library("video_dumping") {
+ testonly = true
+ sources = [
+ "video_dumping.cc",
+ "video_dumping.h",
+ ]
+ deps = [
+ "../../../..:video_test_support",
+ "../../../../../api/test/video:video_frame_writer",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:logging",
+ "../../../../../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_library("encoded_image_data_injector_api") {
+ testonly = true
+ sources = [ "encoded_image_data_injector.h" ]
+
+ deps = [ "../../../../../api/video:encoded_image" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("single_process_encoded_image_data_injector") {
+ testonly = true
+ sources = [
+ "single_process_encoded_image_data_injector.cc",
+ "single_process_encoded_image_data_injector.h",
+ ]
+
+ deps = [
+ ":encoded_image_data_injector_api",
+ "../../../../../api/video:encoded_image",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+}
+
+rtc_library("video_frame_tracking_id_injector") {
+ testonly = true
+ sources = [
+ "video_frame_tracking_id_injector.cc",
+ "video_frame_tracking_id_injector.h",
+ ]
+
+ deps = [
+ ":encoded_image_data_injector_api",
+ "../../../../../api/video:encoded_image",
+ "../../../../../rtc_base:checks",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/memory" ]
+}
+
+rtc_library("simulcast_dummy_buffer_helper") {
+ testonly = true
+ sources = [
+ "simulcast_dummy_buffer_helper.cc",
+ "simulcast_dummy_buffer_helper.h",
+ ]
+ deps = [ "../../../../../api/video:video_frame" ]
+}
+
+rtc_library("quality_analyzing_video_decoder") {
+ testonly = true
+ sources = [
+ "quality_analyzing_video_decoder.cc",
+ "quality_analyzing_video_decoder.h",
+ ]
+ deps = [
+ ":encoded_image_data_injector_api",
+ ":simulcast_dummy_buffer_helper",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/video:encoded_image",
+ "../../../../../api/video:video_frame",
+ "../../../../../api/video_codecs:video_codecs_api",
+ "../../../../../modules/video_coding:video_codec_interface",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("quality_analyzing_video_encoder") {
+ testonly = true
+ sources = [
+ "quality_analyzing_video_encoder.cc",
+ "quality_analyzing_video_encoder.h",
+ ]
+ deps = [
+ ":encoded_image_data_injector_api",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/test/pclf:media_configuration",
+ "../../../../../api/video:video_frame",
+ "../../../../../api/video_codecs:video_codecs_api",
+ "../../../../../modules/video_coding:video_codec_interface",
+ "../../../../../modules/video_coding/svc:scalability_mode_util",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_library("analyzing_video_sinks_helper") {
+ testonly = true
+ sources = [
+ "analyzing_video_sinks_helper.cc",
+ "analyzing_video_sinks_helper.h",
+ ]
+ deps = [
+ "../../../../../api/test/pclf:media_configuration",
+ "../../../../../api/test/video:video_frame_writer",
+ "../../../../../rtc_base:macromagic",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("example_video_quality_analyzer") {
+ testonly = true
+ sources = [
+ "example_video_quality_analyzer.cc",
+ "example_video_quality_analyzer.h",
+ ]
+
+ deps = [
+ "../../../../../api:array_view",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/video:encoded_image",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+}
+
+# This target contains implementation details of DefaultVideoQualityAnalyzer,
+# so headers exported by it shouldn't be used in other places.
+rtc_library("default_video_quality_analyzer_internal") {
+ visibility = [
+ ":default_video_quality_analyzer",
+ ":default_video_quality_analyzer_frames_comparator_test",
+ ":default_video_quality_analyzer_stream_state_test",
+ ":names_collection_test",
+ ":video_analyzer",
+ ]
+
+ testonly = true
+ sources = [
+ "default_video_quality_analyzer_cpu_measurer.cc",
+ "default_video_quality_analyzer_cpu_measurer.h",
+ "default_video_quality_analyzer_frame_in_flight.cc",
+ "default_video_quality_analyzer_frame_in_flight.h",
+ "default_video_quality_analyzer_frames_comparator.cc",
+ "default_video_quality_analyzer_frames_comparator.h",
+ "default_video_quality_analyzer_internal_shared_objects.cc",
+ "default_video_quality_analyzer_internal_shared_objects.h",
+ "default_video_quality_analyzer_stream_state.cc",
+ "default_video_quality_analyzer_stream_state.h",
+ "names_collection.cc",
+ "names_collection.h",
+ ]
+
+ deps = [
+ ":default_video_quality_analyzer_shared",
+ ":multi_reader_queue",
+ "../..:metric_metadata_keys",
+ "../../../../../api:array_view",
+ "../../../../../api:scoped_refptr",
+ "../../../../../api/numerics",
+ "../../../../../api/units:data_size",
+ "../../../../../api/units:timestamp",
+ "../../../../../api/video:video_frame",
+ "../../../../../api/video:video_frame_type",
+ "../../../../../common_video",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base:platform_thread",
+ "../../../../../rtc_base:rtc_base_tests_utils",
+ "../../../../../rtc_base:rtc_event",
+ "../../../../../rtc_base:stringutils",
+ "../../../../../rtc_base:timeutils",
+ "../../../../../rtc_base/synchronization:mutex",
+ "../../../../../rtc_tools:video_quality_analysis",
+ "../../../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("multi_reader_queue") {
+ testonly = true
+ sources = [ "multi_reader_queue.h" ]
+ deps = [ "../../../../../rtc_base:checks" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("video_quality_metrics_reporter") {
+ testonly = true
+ sources = [
+ "video_quality_metrics_reporter.cc",
+ "video_quality_metrics_reporter.h",
+ ]
+ deps = [
+ "../..:metric_metadata_keys",
+ "../../../../../api:peer_connection_quality_test_fixture_api",
+ "../../../../../api:rtc_stats_api",
+ "../../../../../api:track_id_stream_info_map",
+ "../../../../../api/numerics",
+ "../../../../../api/test/metrics:metric",
+ "../../../../../api/test/metrics:metrics_logger",
+ "../../../../../api/units:data_rate",
+ "../../../../../api/units:data_size",
+ "../../../../../api/units:time_delta",
+ "../../../../../api/units:timestamp",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base/synchronization:mutex",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_library("default_video_quality_analyzer") {
+ testonly = true
+ sources = [
+ "default_video_quality_analyzer.cc",
+ "default_video_quality_analyzer.h",
+ ]
+
+ deps = [
+ ":default_video_quality_analyzer_internal",
+ ":default_video_quality_analyzer_shared",
+ "../..:metric_metadata_keys",
+ "../../../../../api:array_view",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/numerics",
+ "../../../../../api/test/metrics:metric",
+ "../../../../../api/test/metrics:metrics_logger",
+ "../../../../../api/units:data_size",
+ "../../../../../api/units:time_delta",
+ "../../../../../api/units:timestamp",
+ "../../../../../api/video:encoded_image",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base:macromagic",
+ "../../../../../rtc_base:stringutils",
+ "../../../../../rtc_base/synchronization:mutex",
+ "../../../../../system_wrappers",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("default_video_quality_analyzer_shared") {
+ testonly = true
+ sources = [
+ "default_video_quality_analyzer_shared_objects.cc",
+ "default_video_quality_analyzer_shared_objects.h",
+ ]
+
+ deps = [
+ "../../../../../api/numerics",
+ "../../../../../api/units:timestamp",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base:stringutils",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("analyzing_video_sink") {
+ testonly = true
+ sources = [
+ "analyzing_video_sink.cc",
+ "analyzing_video_sink.h",
+ ]
+ deps = [
+ ":analyzing_video_sinks_helper",
+ ":simulcast_dummy_buffer_helper",
+ ":video_dumping",
+ "../../../..:fixed_fps_video_frame_writer_adapter",
+ "../../../..:test_renderer",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/numerics",
+ "../../../../../api/test/pclf:media_configuration",
+ "../../../../../api/test/video:video_frame_writer",
+ "../../../../../api/units:timestamp",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base:macromagic",
+ "../../../../../rtc_base/synchronization:mutex",
+ "../../../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory:memory",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_quality_analyzer_injection_helper") {
+ testonly = true
+ sources = [
+ "video_quality_analyzer_injection_helper.cc",
+ "video_quality_analyzer_injection_helper.h",
+ ]
+ deps = [
+ ":analyzing_video_sink",
+ ":analyzing_video_sinks_helper",
+ ":encoded_image_data_injector_api",
+ ":quality_analyzing_video_decoder",
+ ":quality_analyzing_video_encoder",
+ ":simulcast_dummy_buffer_helper",
+ ":video_dumping",
+ "../../../..:fixed_fps_video_frame_writer_adapter",
+ "../../../..:test_renderer",
+ "../../../..:video_test_common",
+ "../../../..:video_test_support",
+ "../../../../../api:array_view",
+ "../../../../../api:stats_observer_interface",
+ "../../../../../api:video_quality_analyzer_api",
+ "../../../../../api/test/pclf:media_configuration",
+ "../../../../../api/video:video_frame",
+ "../../../../../api/video_codecs:video_codecs_api",
+ "../../../../../rtc_base:checks",
+ "../../../../../rtc_base:logging",
+ "../../../../../rtc_base:stringutils",
+ "../../../../../rtc_base/synchronization:mutex",
+ "../../../../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_library("simulcast_dummy_buffer_helper_test") {
+ testonly = true
+ sources = [ "simulcast_dummy_buffer_helper_test.cc" ]
+ deps = [
+ ":simulcast_dummy_buffer_helper",
+ "../../../..:test_support",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:random",
+ ]
+ }
+
+ rtc_library("analyzing_video_sink_test") {
+ testonly = true
+ sources = [ "analyzing_video_sink_test.cc" ]
+ deps = [
+ ":analyzing_video_sink",
+ ":example_video_quality_analyzer",
+ "../../../..:fileutils",
+ "../../../..:test_support",
+ "../../../..:video_test_support",
+ "../../../../../api:create_frame_generator",
+ "../../../../../api:frame_generator_api",
+ "../../../../../api:scoped_refptr",
+ "../../../../../api/test/pclf:media_configuration",
+ "../../../../../api/units:time_delta",
+ "../../../../../api/units:timestamp",
+ "../../../../../api/video:video_frame",
+ "../../../../../common_video",
+ "../../../../../rtc_base:timeutils",
+ "../../../../../system_wrappers",
+ "../../../../time_controller",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("analyzing_video_sinks_helper_test") {
+ testonly = true
+ sources = [ "analyzing_video_sinks_helper_test.cc" ]
+ deps = [
+ ":analyzing_video_sinks_helper",
+ "../../../..:test_support",
+ "../../../../../api/test/pclf:media_configuration",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("default_video_quality_analyzer_frames_comparator_test") {
+ testonly = true
+ sources = [ "default_video_quality_analyzer_frames_comparator_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer_internal",
+ ":default_video_quality_analyzer_shared",
+ "../../../..:test_support",
+ "../../../../../api:create_frame_generator",
+ "../../../../../api/units:timestamp",
+ "../../../../../rtc_base:stringutils",
+ "../../../../../system_wrappers",
+ ]
+ }
+
+ rtc_library("names_collection_test") {
+ testonly = true
+ sources = [ "names_collection_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer_internal",
+ "../../../..:test_support",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings:strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+ }
+
+ rtc_library("multi_reader_queue_test") {
+ testonly = true
+ sources = [ "multi_reader_queue_test.cc" ]
+ deps = [
+ ":multi_reader_queue",
+ "../../../..:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("default_video_quality_analyzer_stream_state_test") {
+ testonly = true
+ sources = [ "default_video_quality_analyzer_stream_state_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer_internal",
+ "../../../..:test_support",
+ "../../../../../api/units:timestamp",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("default_video_quality_analyzer_test") {
+ testonly = true
+ sources = [ "default_video_quality_analyzer_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer",
+ ":default_video_quality_analyzer_shared",
+ "../../../..:test_support",
+ "../../../../../api:create_frame_generator",
+ "../../../../../api:rtp_packet_info",
+ "../../../../../api/test/metrics:global_metrics_logger_and_exporter",
+ "../../../../../api/video:encoded_image",
+ "../../../../../api/video:video_frame",
+ "../../../../../common_video",
+ "../../../../../rtc_base:stringutils",
+ "../../../../../rtc_tools:video_quality_analysis",
+ "../../../../../system_wrappers",
+ ]
+ }
+
+ rtc_library("default_video_quality_analyzer_metric_names_test") {
+ testonly = true
+ sources = [ "default_video_quality_analyzer_metric_names_test.cc" ]
+ deps = [
+ ":default_video_quality_analyzer",
+ "../../../..:test_support",
+ "../../../../../api:create_frame_generator",
+ "../../../../../api:rtp_packet_info",
+ "../../../../../api/test/metrics:metric",
+ "../../../../../api/test/metrics:metrics_logger",
+ "../../../../../api/test/metrics:stdout_metrics_exporter",
+ "../../../../../api/video:encoded_image",
+ "../../../../../api/video:video_frame",
+ "../../../../../common_video",
+ "../../../../../rtc_tools:video_quality_analysis",
+ "../../../../../system_wrappers",
+ ]
+ }
+
+ rtc_library("video_dumping_test") {
+ testonly = true
+ sources = [ "video_dumping_test.cc" ]
+ deps = [
+ ":video_dumping",
+ "../../../..:fileutils",
+ "../../../..:test_support",
+ "../../../..:video_test_support",
+ "../../../../../api:scoped_refptr",
+ "../../../../../api/video:video_frame",
+ "../../../../../rtc_base:random",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("single_process_encoded_image_data_injector_unittest") {
+ testonly = true
+ sources = [ "single_process_encoded_image_data_injector_unittest.cc" ]
+ deps = [
+ ":single_process_encoded_image_data_injector",
+ "../../../..:test_support",
+ "../../../../../api/video:encoded_image",
+ "../../../../../rtc_base:buffer",
+ ]
+ }
+
+ rtc_library("video_frame_tracking_id_injector_unittest") {
+ testonly = true
+ sources = [ "video_frame_tracking_id_injector_unittest.cc" ]
+ deps = [
+ ":video_frame_tracking_id_injector",
+ "../../../..:test_support",
+ "../../../../../api/video:encoded_image",
+ "../../../../../rtc_base:buffer",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc
new file mode 100644
index 0000000000..fb221e6797
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc
@@ -0,0 +1,220 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h"
+
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/units/timestamp.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
+#include "test/pc/e2e/analyzer/video/video_dumping.h"
+#include "test/testsupport/fixed_fps_video_frame_writer_adapter.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+AnalyzingVideoSink::AnalyzingVideoSink(absl::string_view peer_name,
+ Clock* clock,
+ VideoQualityAnalyzerInterface& analyzer,
+ AnalyzingVideoSinksHelper& sinks_helper,
+ const VideoSubscription& subscription,
+ bool report_infra_stats)
+ : peer_name_(peer_name),
+ report_infra_stats_(report_infra_stats),
+ clock_(clock),
+ analyzer_(&analyzer),
+ sinks_helper_(&sinks_helper),
+ subscription_(subscription) {}
+
+void AnalyzingVideoSink::UpdateSubscription(
+ const VideoSubscription& subscription) {
+ // For peers with changed resolutions we need to close current writers and
+ // open new ones. This is done by removing existing sinks, which will force
+ // creation of the new sinks when next frame will be received.
+ std::set<test::VideoFrameWriter*> writers_to_close;
+ {
+ MutexLock lock(&mutex_);
+ subscription_ = subscription;
+ for (auto it = stream_sinks_.cbegin(); it != stream_sinks_.cend();) {
+ absl::optional<VideoResolution> new_requested_resolution =
+ subscription_.GetResolutionForPeer(it->second.sender_peer_name);
+ if (!new_requested_resolution.has_value() ||
+ (*new_requested_resolution != it->second.resolution)) {
+ RTC_LOG(LS_INFO) << peer_name_ << ": Subscribed resolution for stream "
+ << it->first << " from " << it->second.sender_peer_name
+ << " was updated from "
+ << it->second.resolution.ToString() << " to "
+ << new_requested_resolution->ToString()
+ << ". Repopulating all video sinks and recreating "
+ << "requested video writers";
+ writers_to_close.insert(it->second.video_frame_writer);
+ it = stream_sinks_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ }
+ sinks_helper_->CloseAndRemoveVideoWriters(writers_to_close);
+}
+
+void AnalyzingVideoSink::OnFrame(const VideoFrame& frame) {
+ if (IsDummyFrame(frame)) {
+ // This is a dummy frame, so we don't need to process it further.
+ return;
+ }
+
+ if (frame.id() == VideoFrame::kNotSetId) {
+ // If the frame ID is unknown we can't get the required render resolution,
+ // so pass the frame to the analyzer in its actual resolution.
+ AnalyzeFrame(frame);
+ } else {
+ std::string stream_label = analyzer_->GetStreamLabel(frame.id());
+ MutexLock lock(&mutex_);
+ Timestamp processing_started = clock_->CurrentTime();
+ SinksDescriptor* sinks_descriptor = PopulateSinks(stream_label);
+ RTC_CHECK(sinks_descriptor != nullptr);
+
+ VideoFrame scaled_frame =
+ ScaleVideoFrame(frame, sinks_descriptor->resolution);
+ AnalyzeFrame(scaled_frame);
+ for (auto& sink : sinks_descriptor->sinks) {
+ sink->OnFrame(scaled_frame);
+ }
+ Timestamp processing_finished = clock_->CurrentTime();
+
+ if (report_infra_stats_) {
+ stats_.analyzing_sink_processing_time_ms.AddSample(
+ (processing_finished - processing_started).ms<double>());
+ }
+ }
+}
+
+AnalyzingVideoSink::Stats AnalyzingVideoSink::stats() const {
+ MutexLock lock(&mutex_);
+ return stats_;
+}
+
+VideoFrame AnalyzingVideoSink::ScaleVideoFrame(
+ const VideoFrame& frame,
+ const VideoResolution& required_resolution) {
+ Timestamp processing_started = clock_->CurrentTime();
+ if (required_resolution.width() == static_cast<size_t>(frame.width()) &&
+ required_resolution.height() == static_cast<size_t>(frame.height())) {
+ if (report_infra_stats_) {
+ stats_.scaling_tims_ms.AddSample(
+ (clock_->CurrentTime() - processing_started).ms<double>());
+ }
+ return frame;
+ }
+
+ // We allow some difference in the aspect ratio because when decoder
+ // downscales video stream it may round up some dimensions to make them even,
+ // ex: 960x540 -> 480x270 -> 240x136 instead of 240x135.
+ RTC_CHECK_LE(std::abs(static_cast<double>(required_resolution.width()) /
+ required_resolution.height() -
+ static_cast<double>(frame.width()) / frame.height()),
+ 0.1)
+ << peer_name_
+ << ": Received frame has too different aspect ratio compared to "
+ << "requested video resolution: required resolution="
+ << required_resolution.ToString()
+ << "; actual resolution=" << frame.width() << "x" << frame.height();
+
+ rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create(
+ required_resolution.width(), required_resolution.height()));
+ scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420());
+
+ VideoFrame scaled_frame = frame;
+ scaled_frame.set_video_frame_buffer(scaled_buffer);
+ if (report_infra_stats_) {
+ stats_.scaling_tims_ms.AddSample(
+ (clock_->CurrentTime() - processing_started).ms<double>());
+ }
+ return scaled_frame;
+}
+
+void AnalyzingVideoSink::AnalyzeFrame(const VideoFrame& frame) {
+ VideoFrame frame_copy = frame;
+ frame_copy.set_video_frame_buffer(
+ I420Buffer::Copy(*frame.video_frame_buffer()->ToI420()));
+ analyzer_->OnFrameRendered(peer_name_, frame_copy);
+}
+
+AnalyzingVideoSink::SinksDescriptor* AnalyzingVideoSink::PopulateSinks(
+ absl::string_view stream_label) {
+ // Fast path: the sinks already exist.
+ auto sinks_it = stream_sinks_.find(std::string(stream_label));
+ if (sinks_it != stream_sinks_.end()) {
+ return &sinks_it->second;
+ }
+
+ // Slow path: we need to create and save the sinks.
+ absl::optional<std::pair<std::string, VideoConfig>> peer_and_config =
+ sinks_helper_->GetPeerAndConfig(stream_label);
+ RTC_CHECK(peer_and_config.has_value())
+ << "No video config for stream " << stream_label;
+ const std::string& sender_peer_name = peer_and_config->first;
+ const VideoConfig& config = peer_and_config->second;
+
+ absl::optional<VideoResolution> resolution =
+ subscription_.GetResolutionForPeer(sender_peer_name);
+ if (!resolution.has_value()) {
+ RTC_LOG(LS_ERROR) << peer_name_ << " received stream " << stream_label
+ << " from " << sender_peer_name
+ << " for which they were not subscribed";
+ resolution = config.GetResolution();
+ }
+ if (!resolution->IsRegular()) {
+ RTC_LOG(LS_ERROR) << peer_name_ << " received stream " << stream_label
+ << " from " << sender_peer_name
+ << " for which resolution wasn't resolved";
+ resolution = config.GetResolution();
+ }
+
+ RTC_CHECK(resolution.has_value());
+
+ SinksDescriptor sinks_descriptor(sender_peer_name, *resolution);
+ if (config.output_dump_options.has_value()) {
+ std::unique_ptr<test::VideoFrameWriter> writer =
+ config.output_dump_options->CreateOutputDumpVideoFrameWriter(
+ stream_label, peer_name_, *resolution);
+ if (config.output_dump_use_fixed_framerate) {
+ writer = std::make_unique<test::FixedFpsVideoFrameWriterAdapter>(
+ resolution->fps(), clock_, std::move(writer));
+ }
+ sinks_descriptor.sinks.push_back(std::make_unique<VideoWriter>(
+ writer.get(), config.output_dump_options->sampling_modulo()));
+ sinks_descriptor.video_frame_writer =
+ sinks_helper_->AddVideoWriter(std::move(writer));
+ }
+ if (config.show_on_screen) {
+ sinks_descriptor.sinks.push_back(
+ absl::WrapUnique(test::VideoRenderer::Create(
+ (*config.stream_label + "-render").c_str(), resolution->width(),
+ resolution->height())));
+ }
+ return &stream_sinks_.emplace(stream_label, std::move(sinks_descriptor))
+ .first->second;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h
new file mode 100644
index 0000000000..1834bbe469
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// A video sink that injects the video quality analyzer into WebRTC's
+// rendering path.
+class AnalyzingVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ struct Stats {
+ // Time required to scale video frame to the requested rendered resolution.
+ // Collected only for frames with ID set and iff `report_infra_stats` is
+ // true.
+ SamplesStatsCounter scaling_tims_ms;
+ // Time required to process single video frame. Collected only for frames
+ // with ID set and iff `report_infra_stats` is true.
+ SamplesStatsCounter analyzing_sink_processing_time_ms;
+ };
+
+ AnalyzingVideoSink(absl::string_view peer_name,
+ Clock* clock,
+ VideoQualityAnalyzerInterface& analyzer,
+ AnalyzingVideoSinksHelper& sinks_helper,
+ const VideoSubscription& subscription,
+ bool report_infra_stats);
+
+ // Updates subscription used by this peer to render received video.
+ void UpdateSubscription(const VideoSubscription& subscription);
+
+ void OnFrame(const VideoFrame& frame) override;
+
+ Stats stats() const;
+
+ private:
+ struct SinksDescriptor {
+ SinksDescriptor(absl::string_view sender_peer_name,
+ const VideoResolution& resolution)
+ : sender_peer_name(sender_peer_name), resolution(resolution) {}
+
+ // Required to be able to resolve resolutions on new subscription and
+ // understand if we need to recreate `video_frame_writer` and `sinks`.
+ std::string sender_peer_name;
+ // Resolution which was used to create `video_frame_writer` and `sinks`.
+ VideoResolution resolution;
+
+ // Set only if dumping of the output video was requested.
+ test::VideoFrameWriter* video_frame_writer = nullptr;
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
+ };
+
+ // Scales video frame to `required_resolution` if necessary. Crashes if video
+ // frame and `required_resolution` have different aspect ratio.
+ VideoFrame ScaleVideoFrame(const VideoFrame& frame,
+ const VideoResolution& required_resolution)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Creates a full copy of the frame to release any internal buffers owned
+ // by the frame, and passes the created copy to the analyzer. Uses
+ // `I420Buffer` to represent the frame content.
+ void AnalyzeFrame(const VideoFrame& frame);
+ // Populates sinks for the specified stream and caches them in `stream_sinks_`.
+ SinksDescriptor* PopulateSinks(absl::string_view stream_label)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ const std::string peer_name_;
+ const bool report_infra_stats_;
+ Clock* const clock_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+ AnalyzingVideoSinksHelper* const sinks_helper_;
+
+ mutable Mutex mutex_;
+ VideoSubscription subscription_ RTC_GUARDED_BY(mutex_);
+ std::map<std::string, SinksDescriptor> stream_sinks_ RTC_GUARDED_BY(mutex_);
+ Stats stats_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINK_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc
new file mode 100644
index 0000000000..6cd89551ea
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc
@@ -0,0 +1,598 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h"
+
+#include <stdio.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/time_utils.h"
+#include "system_wrappers/include/clock.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::Ge;
+using ::testing::Test;
+
+// Remove files and directories in a directory non-recursively.
+void CleanDir(absl::string_view dir, size_t expected_output_files_count) {
+ absl::optional<std::vector<std::string>> dir_content =
+ test::ReadDirectory(dir);
+ if (expected_output_files_count == 0) {
+ ASSERT_TRUE(!dir_content.has_value() || dir_content->empty())
+ << "Empty directory is expected";
+ } else {
+ ASSERT_TRUE(dir_content.has_value()) << "Test directory is empty!";
+ EXPECT_EQ(dir_content->size(), expected_output_files_count);
+ for (const auto& entry : *dir_content) {
+ if (test::DirExists(entry)) {
+ EXPECT_TRUE(test::RemoveDir(entry))
+ << "Failed to remove sub directory: " << entry;
+ } else if (test::FileExists(entry)) {
+ EXPECT_TRUE(test::RemoveFile(entry))
+ << "Failed to remove file: " << entry;
+ } else {
+ FAIL() << "Can't remove unknown file type: " << entry;
+ }
+ }
+ }
+ EXPECT_TRUE(test::RemoveDir(dir)) << "Failed to remove directory: " << dir;
+}
+
+VideoFrame CreateFrame(test::FrameGeneratorInterface& frame_generator) {
+ test::FrameGeneratorInterface::VideoFrameData frame_data =
+ frame_generator.NextFrame();
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .build();
+}
+
+std::unique_ptr<test::FrameGeneratorInterface> CreateFrameGenerator(
+ size_t width,
+ size_t height) {
+ return test::CreateSquareFrameGenerator(width, height,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+}
+
+void AssertFrameIdsAre(const std::string& filename,
+ std::vector<std::string> expected_ids) {
+ FILE* file = fopen(filename.c_str(), "r");
+ ASSERT_TRUE(file != nullptr) << "Failed to open frame ids file: " << filename;
+ std::vector<std::string> actual_ids;
+ char buffer[8];
+ while (fgets(buffer, sizeof buffer, file) != nullptr) {
+ std::string current_id(buffer);
+ EXPECT_GE(current_id.size(), 2lu)
+ << "Found invalid frame id: [" << current_id << "]";
+ if (current_id.size() < 2) {
+ continue;
+ }
+ // Trim "\n" at the end.
+ actual_ids.push_back(current_id.substr(0, current_id.size() - 1));
+ }
+ fclose(file);
+ EXPECT_THAT(actual_ids, ElementsAreArray(expected_ids));
+}
+
+class AnalyzingVideoSinkTest : public Test {
+ protected:
+ ~AnalyzingVideoSinkTest() override = default;
+
+ void SetUp() override {
+ // Create an empty temporary directory for this test.
+ test_directory_ = test::JoinFilename(
+ test::OutputPath(),
+ "TestDir_AnalyzingVideoSinkTest_" +
+ std::string(
+ testing::UnitTest::GetInstance()->current_test_info()->name()));
+ test::CreateDir(test_directory_);
+ }
+
+ void TearDown() override {
+ CleanDir(test_directory_, expected_output_files_count_);
+ }
+
+ void ExpectOutputFilesCount(size_t count) {
+ expected_output_files_count_ = count;
+ }
+
+ std::string test_directory_;
+ size_t expected_output_files_count_ = 0;
+};
+
+TEST_F(AnalyzingVideoSinkTest, VideoFramesAreDumpedCorrectly) {
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/1280, /*height=*/720);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(1)));
+
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Actual should be downscaled version of expected.
+ EXPECT_GT(ssim, 0.98);
+ EXPECT_GT(psnr, 38);
+
+ ExpectOutputFilesCount(1);
+}
+
+TEST_F(AnalyzingVideoSinkTest,
+ FallbackOnConfigResolutionIfNoSubscriptionProvided) {
+ VideoSubscription subscription;
+ VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/320, /*height=*/240);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(1)));
+
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(ssim, 1.00);
+ EXPECT_DOUBLE_EQ(psnr, 48);
+
+ ExpectOutputFilesCount(1);
+}
+
+TEST_F(AnalyzingVideoSinkTest,
+ FallbackOnConfigResolutionIfNoSubscriptionIsNotResolved) {
+ VideoSubscription subscription;
+ subscription.SubscribeToAllPeers(
+ VideoResolution(VideoResolution::Spec::kMaxFromSender));
+ VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/320, /*height=*/240);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(1)));
+
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(ssim, 1.00);
+ EXPECT_DOUBLE_EQ(psnr, 48);
+
+ ExpectOutputFilesCount(1);
+}
+
+TEST_F(AnalyzingVideoSinkTest,
+ VideoFramesAreDumpedCorrectlyWhenSubscriptionChanged) {
+ VideoSubscription subscription_before;
+ subscription_before.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30));
+ VideoSubscription subscription_after;
+ subscription_after.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/1280, /*height=*/720);
+ VideoFrame frame_before = CreateFrame(*frame_generator);
+ frame_before.set_id(
+ analyzer.OnFrameCaptured("alice", "alice_video", frame_before));
+ VideoFrame frame_after = CreateFrame(*frame_generator);
+ frame_after.set_id(
+ analyzer.OnFrameCaptured("alice", "alice_video", frame_after));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription_before, /*report_infra_stats=*/false);
+ sink.OnFrame(frame_before);
+
+ sink.UpdateSubscription(subscription_after);
+ sink.OnFrame(frame_after);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(2)));
+
+ {
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_1280x720_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame_before.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(ssim, 1.00);
+ EXPECT_DOUBLE_EQ(psnr, 48);
+ }
+ {
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame_after.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Actual should be downscaled version of expected.
+ EXPECT_GT(ssim, 0.98);
+ EXPECT_GT(psnr, 38);
+ }
+
+ ExpectOutputFilesCount(2);
+}
+
+TEST_F(AnalyzingVideoSinkTest,
+ VideoFramesAreDumpedCorrectlyWhenSubscriptionChangedOnTheSameOne) {
+ VideoSubscription subscription_before;
+ subscription_before.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30));
+ VideoSubscription subscription_after;
+ subscription_after.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/640, /*height=*/360, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/640, /*height=*/360);
+ VideoFrame frame_before = CreateFrame(*frame_generator);
+ frame_before.set_id(
+ analyzer.OnFrameCaptured("alice", "alice_video", frame_before));
+ VideoFrame frame_after = CreateFrame(*frame_generator);
+ frame_after.set_id(
+ analyzer.OnFrameCaptured("alice", "alice_video", frame_after));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription_before, /*report_infra_stats=*/false);
+ sink.OnFrame(frame_before);
+
+ sink.UpdateSubscription(subscription_after);
+ sink.OnFrame(frame_after);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(2)));
+
+ {
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(2));
+ // Read the first frame.
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame_before.video_frame_buffer()->ToI420();
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00);
+ EXPECT_DOUBLE_EQ(I420PSNR(*expected_frame, *actual_frame), 48);
+ // Read the second frame.
+ actual_frame = frame_reader->PullFrame();
+ expected_frame = frame_after.video_frame_buffer()->ToI420();
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00);
+ EXPECT_DOUBLE_EQ(I420PSNR(*expected_frame, *actual_frame), 48);
+ }
+
+ ExpectOutputFilesCount(1);
+}
+
+TEST_F(AnalyzingVideoSinkTest, SmallDiviationsInAspectRationAreAllowed) {
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/480, /*height=*/270, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/480, /*height=*/270,
+ /*fps=*/30);
+ video_config.output_dump_options = VideoDumpOptions(test_directory_);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ // Generator produces downscaled frames with a slightly different aspect ratio.
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/240, /*height=*/136);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame);
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(1)));
+
+ {
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_480x270_30.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(1));
+ // Read the first frame.
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame.video_frame_buffer()->ToI420();
+ // Actual frame is upscaled version of the expected. But because rendered
+ // resolution is equal to the actual frame size we need to upscale expected
+ // during comparison and then they have to be the same.
+ EXPECT_DOUBLE_EQ(I420SSIM(*actual_frame, *expected_frame), 1);
+ EXPECT_DOUBLE_EQ(I420PSNR(*actual_frame, *expected_frame), 48);
+ }
+
+ ExpectOutputFilesCount(1);
+}
+
+TEST_F(AnalyzingVideoSinkTest, VideoFramesIdsAreDumpedWhenRequested) {
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/320, /*height=*/240, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240,
+ /*fps=*/30);
+ video_config.output_dump_options =
+ VideoDumpOptions(test_directory_, /*export_frame_ids=*/true);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/320, /*height=*/240);
+
+ std::vector<std::string> expected_frame_ids;
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ for (int i = 0; i < 10; ++i) {
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+ expected_frame_ids.push_back(std::to_string(frame.id()));
+ sink.OnFrame(frame);
+ }
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(10)));
+
+ AssertFrameIdsAre(
+ test::JoinFilename(test_directory_,
+ "alice_video_bob_320x240_30.frame_ids.txt"),
+ expected_frame_ids);
+
+ ExpectOutputFilesCount(2);
+}
+
+TEST_F(AnalyzingVideoSinkTest,
+ VideoFramesAndIdsAreDumpedWithFixedFpsWhenRequested) {
+ GlobalSimulatedTimeController simulated_time(Timestamp::Seconds(100000));
+
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/320, /*height=*/240, /*fps=*/10));
+ VideoConfig video_config("alice_video", /*width=*/320, /*height=*/240,
+ /*fps=*/10);
+ video_config.output_dump_options =
+ VideoDumpOptions(test_directory_, /*export_frame_ids=*/true);
+ video_config.output_dump_use_fixed_framerate = true;
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/320, /*height=*/240);
+
+ VideoFrame frame1 = CreateFrame(*frame_generator);
+ frame1.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame1));
+ VideoFrame frame2 = CreateFrame(*frame_generator);
+ frame2.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame2));
+
+ {
+ // `helper` and `sink` have to be destroyed so all frames will be written
+ // to the disk.
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", simulated_time.GetClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame1);
+ // Advance almost 1 second, so the first frame has to be repeated 9
+ // more times.
+ simulated_time.AdvanceTime(TimeDelta::Millis(990));
+ sink.OnFrame(frame2);
+ simulated_time.AdvanceTime(TimeDelta::Millis(100));
+ }
+
+ EXPECT_THAT(analyzer.frames_rendered(), Eq(static_cast<uint64_t>(2)));
+
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_320x240_10.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(11));
+ for (int i = 0; i < 10; ++i) {
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame1.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(ssim, 1.00);
+ EXPECT_DOUBLE_EQ(psnr, 48);
+ }
+ rtc::scoped_refptr<I420Buffer> actual_frame = frame_reader->PullFrame();
+ rtc::scoped_refptr<I420BufferInterface> expected_frame =
+ frame2.video_frame_buffer()->ToI420();
+ double psnr = I420PSNR(*expected_frame, *actual_frame);
+ double ssim = I420SSIM(*expected_frame, *actual_frame);
+ // Frames should be equal.
+ EXPECT_DOUBLE_EQ(ssim, 1.00);
+ EXPECT_DOUBLE_EQ(psnr, 48);
+
+ AssertFrameIdsAre(
+ test::JoinFilename(test_directory_,
+ "alice_video_bob_320x240_10.frame_ids.txt"),
+ {std::to_string(frame1.id()), std::to_string(frame1.id()),
+ std::to_string(frame1.id()), std::to_string(frame1.id()),
+ std::to_string(frame1.id()), std::to_string(frame1.id()),
+ std::to_string(frame1.id()), std::to_string(frame1.id()),
+ std::to_string(frame1.id()), std::to_string(frame1.id()),
+ std::to_string(frame2.id())});
+
+ ExpectOutputFilesCount(2);
+}
+
+TEST_F(AnalyzingVideoSinkTest, InfraMetricsCollectedWhenRequested) {
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360,
+ /*fps=*/30);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/640, /*height=*/360);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/true);
+ sink.OnFrame(frame);
+
+ AnalyzingVideoSink::Stats stats = sink.stats();
+ EXPECT_THAT(stats.scaling_tims_ms.NumSamples(), Eq(1));
+ EXPECT_THAT(stats.scaling_tims_ms.GetAverage(), Ge(0));
+ EXPECT_THAT(stats.analyzing_sink_processing_time_ms.NumSamples(), Eq(1));
+ EXPECT_THAT(stats.analyzing_sink_processing_time_ms.GetAverage(),
+ Ge(stats.scaling_tims_ms.GetAverage()));
+
+ ExpectOutputFilesCount(0);
+}
+
+TEST_F(AnalyzingVideoSinkTest, InfraMetricsNotCollectedWhenNotRequested) {
+ VideoSubscription subscription;
+ subscription.SubscribeToPeer(
+ "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30));
+ VideoConfig video_config("alice_video", /*width=*/640, /*height=*/360,
+ /*fps=*/30);
+
+ ExampleVideoQualityAnalyzer analyzer;
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ CreateFrameGenerator(/*width=*/640, /*height=*/360);
+ VideoFrame frame = CreateFrame(*frame_generator);
+ frame.set_id(analyzer.OnFrameCaptured("alice", "alice_video", frame));
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", video_config);
+ AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper,
+ subscription, /*report_infra_stats=*/false);
+ sink.OnFrame(frame);
+
+ AnalyzingVideoSink::Stats stats = sink.stats();
+ EXPECT_THAT(stats.scaling_tims_ms.NumSamples(), Eq(0));
+ EXPECT_THAT(stats.analyzing_sink_processing_time_ms.NumSamples(), Eq(0));
+
+ ExpectOutputFilesCount(0);
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc
new file mode 100644
index 0000000000..70dc4b00b5
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h"
+
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/video/video_frame_writer.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+void AnalyzingVideoSinksHelper::AddConfig(absl::string_view sender_peer_name,
+ VideoConfig config) {
+ MutexLock lock(&mutex_);
+ auto it = video_configs_.find(*config.stream_label);
+ if (it == video_configs_.end()) {
+ std::string stream_label = *config.stream_label;
+ video_configs_.emplace(
+ std::move(stream_label),
+ std::pair{std::string(sender_peer_name), std::move(config)});
+ } else {
+ it->second = std::pair{std::string(sender_peer_name), std::move(config)};
+ }
+}
+
+absl::optional<std::pair<std::string, VideoConfig>>
+AnalyzingVideoSinksHelper::GetPeerAndConfig(absl::string_view stream_label) {
+ MutexLock lock(&mutex_);
+ auto it = video_configs_.find(std::string(stream_label));
+ if (it == video_configs_.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
+void AnalyzingVideoSinksHelper::RemoveConfig(absl::string_view stream_label) {
+ MutexLock lock(&mutex_);
+ video_configs_.erase(std::string(stream_label));
+}
+
+test::VideoFrameWriter* AnalyzingVideoSinksHelper::AddVideoWriter(
+ std::unique_ptr<test::VideoFrameWriter> video_writer) {
+ MutexLock lock(&mutex_);
+ test::VideoFrameWriter* out = video_writer.get();
+ video_writers_.push_back(std::move(video_writer));
+ return out;
+}
+
+void AnalyzingVideoSinksHelper::CloseAndRemoveVideoWriters(
+ std::set<test::VideoFrameWriter*> writers_to_close) {
+ MutexLock lock(&mutex_);
+ for (auto it = video_writers_.cbegin(); it != video_writers_.cend();) {
+ if (writers_to_close.find(it->get()) != writers_to_close.end()) {
+ (*it)->Close();
+ it = video_writers_.erase(it);
+ } else {
+ ++it;
+ }
+ }
+}
+
+void AnalyzingVideoSinksHelper::Clear() {
+ MutexLock lock(&mutex_);
+ video_configs_.clear();
+ for (const auto& video_writer : video_writers_) {
+ video_writer->Close();
+ }
+ video_writers_.clear();
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h
new file mode 100644
index 0000000000..5f38c5a40e
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_
+
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/video/video_frame_writer.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Registry of known video configs and video writers.
+// This class is thread safe.
+class AnalyzingVideoSinksHelper {
+ public:
+ // Adds config in the registry. If config with such stream label was
+ // registered before, the new value will override the old one.
+ void AddConfig(absl::string_view sender_peer_name, VideoConfig config);
+ absl::optional<std::pair<std::string, VideoConfig>> GetPeerAndConfig(
+ absl::string_view stream_label);
+ // Removes video config for specified stream label. If there are no know video
+ // config for such stream label - does nothing.
+ void RemoveConfig(absl::string_view stream_label);
+
+ // Takes ownership of the provided video writer. All video writers owned by
+ // this class will be closed during `AnalyzingVideoSinksHelper` destruction
+ // and guaranteed to be alive either until explicitly removed by
+ // `CloseAndRemoveVideoWriters` or until `AnalyzingVideoSinksHelper` is
+ // destroyed.
+ //
+ // Returns pointer to the added writer. Ownership is maintained by
+ // `AnalyzingVideoSinksHelper`.
+ test::VideoFrameWriter* AddVideoWriter(
+ std::unique_ptr<test::VideoFrameWriter> video_writer);
+ // For each provided `writers_to_close`, if it is known, will close and
+ // destroy it, otherwise does nothing with it.
+ void CloseAndRemoveVideoWriters(
+ std::set<test::VideoFrameWriter*> writers_to_close);
+
+ // Removes all added configs and close and removes all added writers.
+ void Clear();
+
+ private:
+ Mutex mutex_;
+ std::map<std::string, std::pair<std::string, VideoConfig>> video_configs_
+ RTC_GUARDED_BY(mutex_);
+ std::list<std::unique_ptr<test::VideoFrameWriter>> video_writers_
+ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_ANALYZING_VIDEO_SINKS_HELPER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc
new file mode 100644
index 0000000000..1a820a5229
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc
@@ -0,0 +1,160 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/test/pclf/media_configuration.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::Eq;
+
+// Asserts equality of the main fields of the video config. We don't compare
+// the full config due to the lack of equality definition for a lot of subtypes.
+void AssertConfigsAreEquals(const VideoConfig& actual,
+ const VideoConfig& expected) {
+ EXPECT_THAT(actual.stream_label, Eq(expected.stream_label));
+ EXPECT_THAT(actual.width, Eq(expected.width));
+ EXPECT_THAT(actual.height, Eq(expected.height));
+ EXPECT_THAT(actual.fps, Eq(expected.fps));
+}
+
+TEST(AnalyzingVideoSinksHelperTest, ConfigsCanBeAdded) {
+ VideoConfig config("alice_video", /*width=*/1280, /*height=*/720, /*fps=*/30);
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", config);
+
+ absl::optional<std::pair<std::string, VideoConfig>> registred_config =
+ helper.GetPeerAndConfig("alice_video");
+ ASSERT_TRUE(registred_config.has_value());
+ EXPECT_THAT(registred_config->first, Eq("alice"));
+ AssertConfigsAreEquals(registred_config->second, config);
+}
+
+TEST(AnalyzingVideoSinksHelperTest, AddingForExistingLabelWillOverwriteValue) {
+ VideoConfig config_before("alice_video", /*width=*/1280, /*height=*/720,
+ /*fps=*/30);
+ VideoConfig config_after("alice_video", /*width=*/640, /*height=*/360,
+ /*fps=*/15);
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", config_before);
+
+ absl::optional<std::pair<std::string, VideoConfig>> registred_config =
+ helper.GetPeerAndConfig("alice_video");
+ ASSERT_TRUE(registred_config.has_value());
+ EXPECT_THAT(registred_config->first, Eq("alice"));
+ AssertConfigsAreEquals(registred_config->second, config_before);
+
+ helper.AddConfig("alice", config_after);
+
+ registred_config = helper.GetPeerAndConfig("alice_video");
+ ASSERT_TRUE(registred_config.has_value());
+ EXPECT_THAT(registred_config->first, Eq("alice"));
+ AssertConfigsAreEquals(registred_config->second, config_after);
+}
+
+TEST(AnalyzingVideoSinksHelperTest, ConfigsCanBeRemoved) {
+ VideoConfig config("alice_video", /*width=*/1280, /*height=*/720, /*fps=*/30);
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", config);
+
+ ASSERT_TRUE(helper.GetPeerAndConfig("alice_video").has_value());
+
+ helper.RemoveConfig("alice_video");
+ ASSERT_FALSE(helper.GetPeerAndConfig("alice_video").has_value());
+}
+
+TEST(AnalyzingVideoSinksHelperTest, RemoveOfNonExistingConfigDontCrash) {
+ AnalyzingVideoSinksHelper helper;
+ helper.RemoveConfig("alice_video");
+}
+
+TEST(AnalyzingVideoSinksHelperTest, ClearRemovesAllConfigs) {
+ VideoConfig config1("alice_video", /*width=*/640, /*height=*/360, /*fps=*/30);
+ VideoConfig config2("bob_video", /*width=*/640, /*height=*/360, /*fps=*/30);
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddConfig("alice", config1);
+ helper.AddConfig("bob", config2);
+
+ ASSERT_TRUE(helper.GetPeerAndConfig("alice_video").has_value());
+ ASSERT_TRUE(helper.GetPeerAndConfig("bob_video").has_value());
+
+ helper.Clear();
+ ASSERT_FALSE(helper.GetPeerAndConfig("alice_video").has_value());
+ ASSERT_FALSE(helper.GetPeerAndConfig("bob_video").has_value());
+}
+
+struct TestVideoFrameWriterFactory {
+ int closed_writers_count = 0;
+ int deleted_writers_count = 0;
+
+ std::unique_ptr<test::VideoFrameWriter> CreateWriter() {
+ return std::make_unique<TestVideoFrameWriter>(this);
+ }
+
+ private:
+ class TestVideoFrameWriter : public test::VideoFrameWriter {
+ public:
+ explicit TestVideoFrameWriter(TestVideoFrameWriterFactory* factory)
+ : factory_(factory) {}
+ ~TestVideoFrameWriter() override { factory_->deleted_writers_count++; }
+
+ bool WriteFrame(const VideoFrame& frame) override { return true; }
+
+ void Close() override { factory_->closed_writers_count++; }
+
+ private:
+ TestVideoFrameWriterFactory* factory_;
+ };
+};
+
+TEST(AnalyzingVideoSinksHelperTest, RemovingWritersCloseAndDestroyAllOfThem) {
+ TestVideoFrameWriterFactory factory;
+
+ AnalyzingVideoSinksHelper helper;
+ test::VideoFrameWriter* writer1 =
+ helper.AddVideoWriter(factory.CreateWriter());
+ test::VideoFrameWriter* writer2 =
+ helper.AddVideoWriter(factory.CreateWriter());
+
+ helper.CloseAndRemoveVideoWriters({writer1, writer2});
+
+ EXPECT_THAT(factory.closed_writers_count, Eq(2));
+ EXPECT_THAT(factory.deleted_writers_count, Eq(2));
+}
+
+TEST(AnalyzingVideoSinksHelperTest, ClearCloseAndDestroyAllWriters) {
+ TestVideoFrameWriterFactory factory;
+
+ AnalyzingVideoSinksHelper helper;
+ helper.AddVideoWriter(factory.CreateWriter());
+ helper.AddVideoWriter(factory.CreateWriter());
+
+ helper.Clear();
+
+ EXPECT_THAT(factory.closed_writers_count, Eq(2));
+ EXPECT_THAT(factory.deleted_writers_count, Eq(2));
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
new file mode 100644
index 0000000000..59144589fc
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc
@@ -0,0 +1,1228 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/metrics/metric.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+using ::webrtc::webrtc_pc_e2e::MetricMetadataKey;
+
+constexpr int kBitsInByte = 8;
+constexpr absl::string_view kSkipRenderedFrameReasonProcessed = "processed";
+constexpr absl::string_view kSkipRenderedFrameReasonRendered = "rendered";
+constexpr absl::string_view kSkipRenderedFrameReasonDropped =
+ "considered dropped";
+
+void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Captured : " << counters.captured;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Pre encoded : " << counters.pre_encoded;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Encoded : " << counters.encoded;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Received : " << counters.received;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Decoded : " << counters.decoded;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Rendered : " << counters.rendered;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Dropped : " << counters.dropped;
+ RTC_LOG(LS_INFO) << "[" << name
+ << "] Failed to decode : " << counters.failed_to_decode;
+}
+
+void LogStreamInternalStats(const std::string& name,
+ const StreamStats& stats,
+ Timestamp start_time) {
+ for (const auto& entry : stats.dropped_by_phase) {
+ RTC_LOG(LS_INFO) << "[" << name << "] Dropped at " << ToString(entry.first)
+ << ": " << entry.second;
+ }
+ Timestamp first_encoded_frame_time = Timestamp::PlusInfinity();
+ for (const StreamCodecInfo& encoder : stats.encoders) {
+ RTC_DCHECK(encoder.switched_on_at.IsFinite());
+ RTC_DCHECK(encoder.switched_from_at.IsFinite());
+ if (first_encoded_frame_time.IsInfinite()) {
+ first_encoded_frame_time = encoder.switched_on_at;
+ }
+ RTC_LOG(LS_INFO)
+ << "[" << name << "] Used encoder: \"" << encoder.codec_name
+ << "\" used from (frame_id=" << encoder.first_frame_id
+ << "; from_stream_start="
+ << (encoder.switched_on_at - stats.stream_started_time).ms()
+ << "ms, from_call_start=" << (encoder.switched_on_at - start_time).ms()
+ << "ms) until (frame_id=" << encoder.last_frame_id
+ << "; from_stream_start="
+ << (encoder.switched_from_at - stats.stream_started_time).ms()
+ << "ms, from_call_start="
+ << (encoder.switched_from_at - start_time).ms() << "ms)";
+ }
+ for (const StreamCodecInfo& decoder : stats.decoders) {
+ RTC_DCHECK(decoder.switched_on_at.IsFinite());
+ RTC_DCHECK(decoder.switched_from_at.IsFinite());
+ RTC_LOG(LS_INFO)
+ << "[" << name << "] Used decoder: \"" << decoder.codec_name
+ << "\" used from (frame_id=" << decoder.first_frame_id
+ << "; from_stream_start="
+ << (decoder.switched_on_at - stats.stream_started_time).ms()
+ << "ms, from_call_start=" << (decoder.switched_on_at - start_time).ms()
+ << "ms) until (frame_id=" << decoder.last_frame_id
+ << "; from_stream_start="
+ << (decoder.switched_from_at - stats.stream_started_time).ms()
+ << "ms, from_call_start="
+ << (decoder.switched_from_at - start_time).ms() << "ms)";
+ }
+}
+
+template <typename T>
+absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
+ auto it = map.find(key);
+ if (it == map.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
+SamplesStatsCounter::StatsSample StatsSample(double value,
+ Timestamp sampling_time) {
+ return SamplesStatsCounter::StatsSample{value, sampling_time};
+}
+
+} // namespace
+
+DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
+ webrtc::Clock* clock,
+ test::MetricsLogger* metrics_logger,
+ DefaultVideoQualityAnalyzerOptions options)
+ : options_(options),
+ clock_(clock),
+ metrics_logger_(metrics_logger),
+ frames_comparator_(clock, cpu_measurer_, options) {
+ RTC_CHECK(metrics_logger_);
+}
+
+DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
+ Stop();
+}
+
+void DefaultVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
+ test_label_ = std::move(test_case_name);
+ frames_comparator_.Start(max_threads_count);
+ {
+ MutexLock lock(&mutex_);
+ peers_ = std::make_unique<NamesCollection>(peer_names);
+ RTC_CHECK(start_time_.IsMinusInfinity());
+
+ RTC_CHECK_EQ(state_, State::kNew)
+ << "DefaultVideoQualityAnalyzer is already started";
+ state_ = State::kActive;
+ start_time_ = Now();
+ }
+}
+
+uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
+ const std::string& stream_label,
+ const webrtc::VideoFrame& frame) {
+ // `next_frame_id` is atomic, so we needn't lock here.
+ Timestamp captured_time = Now();
+ Timestamp start_time = Timestamp::MinusInfinity();
+ size_t peer_index = -1;
+ size_t peers_count = -1;
+ size_t stream_index;
+ uint16_t frame_id = VideoFrame::kNotSetId;
+ {
+ MutexLock lock(&mutex_);
+ frame_id = GetNextFrameId();
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+ // Create a local copy of `start_time_`, peer's index and total peers count
+ // to access it without holding a `mutex_` during access to
+ // `frames_comparator_`.
+ start_time = start_time_;
+ peer_index = peers_->index(peer_name);
+ peers_count = peers_->size();
+ stream_index = streams_.AddIfAbsent(stream_label);
+ }
+ // Ensure stats for this stream exists.
+ frames_comparator_.EnsureStatsForStream(stream_index, peer_index, peers_count,
+ captured_time, start_time);
+ {
+ MutexLock lock(&mutex_);
+ stream_to_sender_[stream_index] = peer_index;
+ frame_counters_.captured++;
+ for (size_t i : peers_->GetAllIndexes()) {
+ if (i != peer_index || options_.enable_receive_own_stream) {
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_[key].captured++;
+ }
+ }
+
+ std::set<size_t> frame_receivers_indexes = peers_->GetPresentIndexes();
+ if (!options_.enable_receive_own_stream) {
+ frame_receivers_indexes.erase(peer_index);
+ }
+
+ auto state_it = stream_states_.find(stream_index);
+ if (state_it == stream_states_.end()) {
+ stream_states_.emplace(
+ stream_index,
+ StreamState(peer_index, frame_receivers_indexes, captured_time));
+ }
+ StreamState* state = &stream_states_.at(stream_index);
+ state->PushBack(frame_id);
+ // Update frames in flight info.
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it != captured_frames_in_flight_.end()) {
+ // If we overflow uint16_t and hit previous frame id and this frame is
+ // still in flight, it means that this stream wasn't rendered for long
+ // time and we need to process existing frame as dropped.
+ for (size_t i : peers_->GetPresentIndexes()) {
+ if (i == peer_index && !options_.enable_receive_own_stream) {
+ continue;
+ }
+
+ uint16_t oldest_frame_id = state->PopFront(i);
+ RTC_DCHECK_EQ(frame_id, oldest_frame_id);
+ frame_counters_.dropped++;
+ InternalStatsKey key(stream_index, peer_index, i);
+ stream_frame_counters_.at(key).dropped++;
+
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ StatsSample(captured_frames_in_flight_.size(), Now()));
+ frames_comparator_.AddComparison(
+ InternalStatsKey(stream_index, peer_index, i),
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt, FrameComparisonType::kDroppedFrame,
+ it->second.GetStatsForPeer(i));
+ }
+
+ captured_frames_in_flight_.erase(it);
+ }
+ captured_frames_in_flight_.emplace(
+ frame_id, FrameInFlight(stream_index, frame, captured_time,
+ std::move(frame_receivers_indexes)));
+ // Set frame id on local copy of the frame
+ captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);
+
+ // Update history stream<->frame mapping
+ for (auto it = stream_to_frame_id_history_.begin();
+ it != stream_to_frame_id_history_.end(); ++it) {
+ it->second.erase(frame_id);
+ }
+ stream_to_frame_id_history_[stream_index].insert(frame_id);
+ stream_to_frame_id_full_history_[stream_index].push_back(frame_id);
+
+ // If state has too many frames that are in flight => remove the oldest
+ // queued frame in order to avoid to use too much memory.
+ if (state->GetAliveFramesCount() >
+ options_.max_frames_in_flight_per_stream_count) {
+ uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
+ auto it = captured_frames_in_flight_.find(frame_id_to_remove);
+ RTC_CHECK(it != captured_frames_in_flight_.end())
+ << "Frame with ID " << frame_id_to_remove
+ << " is expected to be in flight, but hasn't been found in "
+ << "|captured_frames_in_flight_|";
+ bool is_removed = it->second.RemoveFrame();
+ RTC_DCHECK(is_removed)
+ << "Invalid stream state: alive frame is removed already";
+ }
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_captured_processing_time_ms.AddSample(
+ (Now() - captured_time).ms<double>());
+ }
+ }
+ return frame_id;
+}
+
+void DefaultVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame) {
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ auto it = captured_frames_in_flight_.find(frame.id());
+ RTC_CHECK(it != captured_frames_in_flight_.end())
+ << "Frame id=" << frame.id() << " not found";
+ FrameInFlight& frame_in_flight = it->second;
+ frame_counters_.pre_encoded++;
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i : peers_->GetAllIndexes()) {
+ if (i != peer_index || options_.enable_receive_own_stream) {
+ InternalStatsKey key(frame_in_flight.stream(), peer_index, i);
+ stream_frame_counters_.at(key).pre_encoded++;
+ }
+ }
+ frame_in_flight.SetPreEncodeTime(Now());
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_pre_encode_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+void DefaultVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
+ uint16_t frame_id,
+ const webrtc::EncodedImage& encoded_image,
+ const EncoderStats& stats,
+ bool discarded) {
+ if (discarded)
+ return;
+
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it == captured_frames_in_flight_.end()) {
+ RTC_LOG(LS_WARNING)
+ << "The encoding of video frame with id [" << frame_id << "] for peer ["
+ << peer_name << "] finished after all receivers rendered this frame or "
+ << "were removed. It can be OK for simulcast/SVC if higher quality "
+ << "stream is not required or the last receiver was unregistered "
+ << "between encoding of different layers, but it may indicate an ERROR "
+ << "for singlecast or if it happens often.";
+ return;
+ }
+ FrameInFlight& frame_in_flight = it->second;
+ // For SVC we can receive multiple encoded images for one frame, so to cover
+ // all cases we have to pick the last encode time.
+ if (!frame_in_flight.HasEncodedTime()) {
+ // Increase counters only when we meet this frame first time.
+ frame_counters_.encoded++;
+ size_t peer_index = peers_->index(peer_name);
+ for (size_t i : peers_->GetAllIndexes()) {
+ if (i != peer_index || options_.enable_receive_own_stream) {
+ InternalStatsKey key(frame_in_flight.stream(), peer_index, i);
+ stream_frame_counters_.at(key).encoded++;
+ }
+ }
+ }
+ Timestamp now = Now();
+ StreamCodecInfo used_encoder;
+ used_encoder.codec_name = stats.encoder_name;
+ used_encoder.first_frame_id = frame_id;
+ used_encoder.last_frame_id = frame_id;
+ used_encoder.switched_on_at = now;
+ used_encoder.switched_from_at = now;
+ frame_in_flight.OnFrameEncoded(
+ now, encoded_image._frameType, DataSize::Bytes(encoded_image.size()),
+ stats.target_encode_bitrate, encoded_image.SpatialIndex().value_or(0),
+ stats.qp, used_encoder);
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_encoded_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+void DefaultVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
+ webrtc::EncodedImageCallback::DropReason reason) {
+ // Here we do nothing, because we will see this drop on renderer side.
+}
+
+void DefaultVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
+ uint16_t frame_id,
+ const webrtc::EncodedImage& input_image) {
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ size_t peer_index = peers_->index(peer_name);
+
+ if (frame_id == VideoFrame::kNotSetId) {
+ frame_counters_.received++;
+ unknown_sender_frame_counters_[std::string(peer_name)].received++;
+ return;
+ }
+
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasReceivedTime(peer_index)) {
+ // It means this frame was predecoded before, so we can skip it. It may
+ // happen when we have multiple simulcast streams in one track and received
+ // the same picture from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+
+ frame_counters_.received++;
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).received++;
+ // Determine the time of the last received packet of this video frame.
+ RTC_DCHECK(!input_image.PacketInfos().empty());
+ Timestamp last_receive_time =
+ std::max_element(input_image.PacketInfos().cbegin(),
+ input_image.PacketInfos().cend(),
+ [](const RtpPacketInfo& a, const RtpPacketInfo& b) {
+ return a.receive_time() < b.receive_time();
+ })
+ ->receive_time();
+ it->second.OnFramePreDecode(peer_index,
+ /*received_time=*/last_receive_time,
+ /*decode_start_time=*/Now(),
+ input_image._frameType,
+ DataSize::Bytes(input_image.size()));
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_pre_decode_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+// Called when `peer_name`'s receiver has decoded `frame`. Updates the global,
+// unknown-sender and per-stream `decoded` counters and records decode end
+// time, resolution and decoder info on the matching in-flight frame entry.
+// All analyzer state is accessed under `mutex_`.
+void DefaultVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame,
+ const DecoderStats& stats) {
+ // Capture the wall-clock start so the optional infra metric below can
+ // measure how long this callback itself took.
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ size_t peer_index = peers_->index(peer_name);
+
+ // Frames without an analyzer-assigned id can't be correlated with a sender;
+ // count them per receiving peer only and bail out.
+ if (frame.id() == VideoFrame::kNotSetId) {
+ frame_counters_.decoded++;
+ unknown_sender_frame_counters_[std::string(peer_name)].decoded++;
+ return;
+ }
+
+ auto it = captured_frames_in_flight_.find(frame.id());
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasDecodeEndTime(peer_index)) {
+ // It means this frame was decoded before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same frame from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+ frame_counters_.decoded++;
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).decoded++;
+ Timestamp now = Now();
+ // Record which decoder handled this frame; switched_on/from are both "now"
+ // because we only observe the decoder one frame at a time here.
+ StreamCodecInfo used_decoder;
+ used_decoder.codec_name = stats.decoder_name;
+ used_decoder.first_frame_id = frame.id();
+ used_decoder.last_frame_id = frame.id();
+ used_decoder.switched_on_at = now;
+ used_decoder.switched_from_at = now;
+ it->second.OnFrameDecoded(peer_index, now, frame.width(), frame.height(),
+ used_decoder);
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_decoded_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+// Called when `peer_name` has rendered `frame`. Besides updating counters and
+// the in-flight entry, this is where drops are detected: every frame queued
+// for this peer ahead of `frame` in the stream's FIFO is marked dropped and
+// sent to the comparator, then `frame` itself is submitted as a regular
+// comparison. Fully received frames are evicted from
+// `captured_frames_in_flight_`.
+void DefaultVideoQualityAnalyzer::OnFrameRendered(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame) {
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ size_t peer_index = peers_->index(peer_name);
+
+ // Frames without an analyzer-assigned id can't be matched to a sender;
+ // only count them per receiving peer.
+ if (frame.id() == VideoFrame::kNotSetId) {
+ frame_counters_.rendered++;
+ unknown_sender_frame_counters_[std::string(peer_name)].rendered++;
+ return;
+ }
+
+ auto frame_it = captured_frames_in_flight_.find(frame.id());
+ if (frame_it == captured_frames_in_flight_.end() ||
+ frame_it->second.HasRenderedTime(peer_index) ||
+ frame_it->second.IsDropped(peer_index)) {
+ // It means this frame was rendered or dropped before, so we can skip it.
+ // It may happen when we have multiple simulcast streams in one track and
+ // received the same frame from two different streams because SFU can't
+ // reliably correlate two simulcast streams and started relaying the second
+ // stream from the same frame it has relayed right before for the first
+ // stream.
+ absl::string_view reason = kSkipRenderedFrameReasonProcessed;
+ if (frame_it != captured_frames_in_flight_.end()) {
+ if (frame_it->second.HasRenderedTime(peer_index)) {
+ reason = kSkipRenderedFrameReasonRendered;
+ } else if (frame_it->second.IsDropped(peer_index)) {
+ reason = kSkipRenderedFrameReasonDropped;
+ }
+ }
+ RTC_LOG(LS_WARNING)
+ << "Peer " << peer_name
+ << "; Received frame out of order: received frame with id "
+ << frame.id() << " which was " << reason << " before";
+ return;
+ }
+
+ // Find corresponding captured frame.
+ FrameInFlight* frame_in_flight = &frame_it->second;
+ absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();
+
+ const size_t stream_index = frame_in_flight->stream();
+ StreamState* state = &stream_states_.at(stream_index);
+ const InternalStatsKey stats_key(stream_index, state->sender(), peer_index);
+
+ // Update frames counters.
+ frame_counters_.rendered++;
+ stream_frame_counters_.at(stats_key).rendered++;
+
+ // Update current frame stats.
+ frame_in_flight->OnFrameRendered(peer_index, Now());
+
+ // After we received frame here we need to check if there are any dropped
+ // frames between this one and last one, that was rendered for this video
+ // stream.
+ int dropped_count = 0;
+ while (!state->IsEmpty(peer_index) &&
+ state->Front(peer_index) != frame.id()) {
+ dropped_count++;
+ uint16_t dropped_frame_id = state->PopFront(peer_index);
+ // Frame with id `dropped_frame_id` was dropped. We need:
+ // 1. Update global and stream frame counters
+ // 2. Extract corresponding frame from `captured_frames_in_flight_`
+ // 3. Send extracted frame to comparison with dropped=true
+ // 4. Cleanup dropped frame
+ frame_counters_.dropped++;
+ stream_frame_counters_.at(stats_key).dropped++;
+
+ auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
+ RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
+ dropped_frame_it->second.MarkDropped(peer_index);
+
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ StatsSample(captured_frames_in_flight_.size(), Now()));
+ frames_comparator_.AddComparison(
+ stats_key, /*captured=*/absl::nullopt, /*rendered=*/absl::nullopt,
+ FrameComparisonType::kDroppedFrame,
+ dropped_frame_it->second.GetStatsForPeer(peer_index));
+
+ // Evict the dropped frame once every expected receiver has accounted
+ // for it (rendered or dropped) — FrameInFlight is a large object.
+ if (dropped_frame_it->second.HaveAllPeersReceived()) {
+ captured_frames_in_flight_.erase(dropped_frame_it);
+ }
+ }
+ // The loop above stopped at this frame's own id; pop it from the FIFO.
+ RTC_DCHECK(!state->IsEmpty(peer_index));
+ state->PopFront(peer_index);
+
+ if (state->last_rendered_frame_time(peer_index)) {
+ frame_in_flight->SetPrevFrameRenderedTime(
+ peer_index, state->last_rendered_frame_time(peer_index).value());
+ }
+ state->SetLastRenderedFrameTime(peer_index,
+ frame_in_flight->rendered_time(peer_index));
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ StatsSample(captured_frames_in_flight_.size(), Now()));
+ frames_comparator_.AddComparison(
+ stats_key, dropped_count, captured_frame, /*rendered=*/frame,
+ FrameComparisonType::kRegular,
+ frame_in_flight->GetStatsForPeer(peer_index));
+
+ if (frame_it->second.HaveAllPeersReceived()) {
+ captured_frames_in_flight_.erase(frame_it);
+ }
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_frame_rendered_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+// Logs an encoder failure for `frame`. Unlike OnDecoderError, no counters or
+// per-frame state are updated here — the error is only surfaced in the log.
+// NOTE(review): `peer_name` is accepted but unused; presumably kept to match
+// the VideoQualityAnalyzerInterface signature.
+void DefaultVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame,
+ int32_t error_code) {
+ RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
+ << ", code=" << error_code;
+}
+
+// Logs a decoder failure for `frame_id` on `peer_name`'s side and records it:
+// bumps global, unknown-sender and per-stream `failed_to_decode` counters and
+// attaches the failing decoder's info to the in-flight frame entry.
+void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code,
+ const DecoderStats& stats) {
+ RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
+ << ", code=" << error_code;
+
+ Timestamp processing_started = Now();
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ size_t peer_index = peers_->index(peer_name);
+
+ // Frames without an analyzer-assigned id can't be matched to a sender;
+ // only count the failure per receiving peer.
+ if (frame_id == VideoFrame::kNotSetId) {
+ frame_counters_.failed_to_decode++;
+ unknown_sender_frame_counters_[std::string(peer_name)].failed_to_decode++;
+ return;
+ }
+
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it == captured_frames_in_flight_.end() ||
+ it->second.HasDecodeEndTime(peer_index)) {
+ // It means this frame was decoded before, so we can skip it. It may happen
+ // when we have multiple simulcast streams in one track and received
+ // the same frame from two different streams because SFU can't reliably
+ // correlate two simulcast streams and started relaying the second stream
+ // from the same frame it has relayed right before for the first stream.
+ return;
+ }
+ frame_counters_.failed_to_decode++;
+ InternalStatsKey key(it->second.stream(),
+ stream_to_sender_.at(it->second.stream()), peer_index);
+ stream_frame_counters_.at(key).failed_to_decode++;
+ Timestamp now = Now();
+ // Record which decoder failed; switched_on/from are both "now" since the
+ // decoder is observed one frame at a time.
+ StreamCodecInfo used_decoder;
+ used_decoder.codec_name = stats.decoder_name;
+ used_decoder.first_frame_id = frame_id;
+ used_decoder.last_frame_id = frame_id;
+ used_decoder.switched_on_at = now;
+ used_decoder.switched_from_at = now;
+ it->second.OnDecoderError(peer_index, used_decoder);
+
+ if (options_.report_infra_metrics) {
+ analyzer_stats_.on_decoder_error_processing_time_ms.AddSample(
+ (Now() - processing_started).ms<double>());
+ }
+}
+
+// Adds a new peer to an ongoing analysis session. Seeds receive-side counters
+// for every existing stream, notifies the frames comparator, and registers the
+// peer as an expected receiver on all per-stream states and in-flight frames.
+// The peer name must not already be registered (RTC_CHECK).
+void DefaultVideoQualityAnalyzer::RegisterParticipantInCall(
+ absl::string_view peer_name) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK(!peers_->HasName(peer_name));
+ size_t new_peer_index = peers_->AddIfAbsent(peer_name);
+
+ // Ensure stats for receiving (for frames from other peers to this one)
+ // streams exists. Since in flight frames will be sent to the new peer
+ // as well. Sending stats (from this peer to others) will be added by
+ // DefaultVideoQualityAnalyzer::OnFrameCaptured.
+ std::vector<std::pair<InternalStatsKey, Timestamp>> stream_started_time;
+ for (auto [stream_index, sender_peer_index] : stream_to_sender_) {
+ InternalStatsKey key(stream_index, sender_peer_index, new_peer_index);
+
+ // To initiate `FrameCounters` for the stream we should pick frame
+ // counters with the same stream index and the same sender's peer index
+ // and any receiver's peer index and copy from its sender side
+ // counters.
+ FrameCounters counters;
+ for (size_t i : peers_->GetPresentIndexes()) {
+ InternalStatsKey prototype_key(stream_index, sender_peer_index, i);
+ auto it = stream_frame_counters_.find(prototype_key);
+ if (it != stream_frame_counters_.end()) {
+ counters.captured = it->second.captured;
+ counters.pre_encoded = it->second.pre_encoded;
+ counters.encoded = it->second.encoded;
+ break;
+ }
+ }
+ // It may happen if we had only one peer before this method was invoked,
+ // then `counters` will be empty. In such case empty `counters` are ok.
+ stream_frame_counters_.insert({key, std::move(counters)});
+
+ stream_started_time.push_back(
+ {key, stream_states_.at(stream_index).stream_started_time()});
+ }
+ frames_comparator_.RegisterParticipantInCall(stream_started_time,
+ start_time_);
+ // Ensure, that frames states are handled correctly
+ // (e.g. dropped frames tracking).
+ for (auto& [stream_index, stream_state] : stream_states_) {
+ stream_state.AddPeer(new_peer_index);
+ }
+ // Register new peer for every frame in flight.
+ // It is guaranteed, that no garbage FrameInFlight objects will stay in
+ // memory because of adding new peer. Even if the new peer won't receive the
+ // frame, the frame will be removed by OnFrameRendered after next frame comes
+ // for the new peer. It is important because FrameInFlight is a large object.
+ for (auto& [frame_id, frame_in_flight] : captured_frames_in_flight_) {
+ frame_in_flight.AddExpectedReceiver(new_peer_index);
+ }
+}
+
+// Removes a peer from an ongoing analysis session. Frames still in flight for
+// the departing peer are flushed to the comparator as kFrameInFlight, the peer
+// is dropped from all stream states, and fully delivered frames are evicted.
+// The peer name must be registered (RTC_CHECK).
+void DefaultVideoQualityAnalyzer::UnregisterParticipantInCall(
+ absl::string_view peer_name) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK(peers_->HasName(peer_name));
+ absl::optional<size_t> peer_index = peers_->RemoveIfPresent(peer_name);
+ RTC_CHECK(peer_index.has_value());
+
+ for (auto& [stream_index, stream_state] : stream_states_) {
+ // A peer never receives its own stream unless the option says otherwise,
+ // so there is nothing to flush for it in that case.
+ if (!options_.enable_receive_own_stream &&
+ peer_index == stream_state.sender()) {
+ continue;
+ }
+
+ AddExistingFramesInFlightForStreamToComparator(stream_index, stream_state,
+ *peer_index);
+
+ stream_state.RemovePeer(*peer_index);
+ }
+
+ // Remove peer from every frame in flight. If we removed the last expected
+ // receiver for the frame, then we should remove this frame if it was
+ // already encoded. If frame wasn't encoded, it still will be used by sender
+ // side pipeline, so we can't delete it yet.
+ for (auto it = captured_frames_in_flight_.begin();
+ it != captured_frames_in_flight_.end();) {
+ FrameInFlight& frame_in_flight = it->second;
+ frame_in_flight.RemoveExpectedReceiver(*peer_index);
+ // If frame was fully sent and all receivers received it, then erase it.
+ // It may happen that when we remove FrameInFlight only some Simulcast/SVC
+ // layers were encoded and frame has encoded time, but more layers might be
+ // encoded after removal. In such case it's safe to still remove a frame,
+ // because OnFrameEncoded method will correctly handle the case when there
+ // is no FrameInFlight for the received encoded image.
+ if (frame_in_flight.HasEncodedTime() &&
+ frame_in_flight.HaveAllPeersReceived()) {
+ it = captured_frames_in_flight_.erase(it);
+ } else {
+ it++;
+ }
+ }
+}
+
+// Stops the analyzer: flushes all remaining in-flight frames to the
+// comparator, stops the comparator itself (passing it the last rendered frame
+// time per stream/receiver for freeze accounting), copies the comparator's
+// final stats into `analyzer_stats_` and reports results. Idempotent: a
+// second call returns immediately. Note the comparator is stopped with
+// `mutex_` released to avoid holding the analyzer lock during comparison
+// drain.
+void DefaultVideoQualityAnalyzer::Stop() {
+ std::map<InternalStatsKey, Timestamp> last_rendered_frame_times;
+ {
+ MutexLock lock(&mutex_);
+ if (state_ == State::kStopped) {
+ return;
+ }
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "DefaultVideoQualityAnalyzer has to be started before use";
+
+ state_ = State::kStopped;
+
+ // Add the amount of frames in flight to the analyzer stats before all left
+ // frames in flight will be sent to the `frames_comparator_`.
+ analyzer_stats_.frames_in_flight_left_count.AddSample(
+ StatsSample(captured_frames_in_flight_.size(), Now()));
+
+ for (auto& state_entry : stream_states_) {
+ const size_t stream_index = state_entry.first;
+ StreamState& stream_state = state_entry.second;
+
+ // Populate `last_rendered_frame_times` map for all peers that were met in
+ // call, not only for the currently presented ones.
+ for (size_t peer_index : peers_->GetAllIndexes()) {
+ if (peer_index == stream_state.sender() &&
+ !options_.enable_receive_own_stream) {
+ continue;
+ }
+
+ InternalStatsKey stats_key(stream_index, stream_state.sender(),
+ peer_index);
+
+ // If there are no freezes in the call we have to report
+ // time_between_freezes_ms as call duration and in such case
+ // `stream_last_freeze_end_time` for this stream will be `start_time_`.
+ // If there is freeze, then we need add time from last rendered frame
+ // to last freeze end as time between freezes.
+ if (stream_state.last_rendered_frame_time(peer_index)) {
+ last_rendered_frame_times.emplace(
+ stats_key,
+ stream_state.last_rendered_frame_time(peer_index).value());
+ }
+ }
+
+ // Push left frame in flight for analysis for the peers that are still in
+ // the call.
+ for (size_t peer_index : peers_->GetPresentIndexes()) {
+ if (peer_index == stream_state.sender() &&
+ !options_.enable_receive_own_stream) {
+ continue;
+ }
+
+ AddExistingFramesInFlightForStreamToComparator(
+ stream_index, stream_state, peer_index);
+ }
+ }
+ }
+ frames_comparator_.Stop(last_rendered_frame_times);
+
+ // Perform final Metrics update. On this place analyzer is stopped and no one
+ // holds any locks.
+ {
+ MutexLock lock(&mutex_);
+ FramesComparatorStats frames_comparator_stats =
+ frames_comparator_.frames_comparator_stats();
+ analyzer_stats_.comparisons_queue_size =
+ std::move(frames_comparator_stats.comparisons_queue_size);
+ analyzer_stats_.comparisons_done = frames_comparator_stats.comparisons_done;
+ analyzer_stats_.cpu_overloaded_comparisons_done =
+ frames_comparator_stats.cpu_overloaded_comparisons_done;
+ analyzer_stats_.memory_overloaded_comparisons_done =
+ frames_comparator_stats.memory_overloaded_comparisons_done;
+ }
+ ReportResults();
+}
+
+// Returns the label of the stream that `frame_id` belongs to, first looking
+// at frames still in flight and then at the per-stream frame-id history
+// (frame ids wrap at 2^16, so retired ids live only in the history).
+// Crashes (RTC_CHECK) if the id is unknown — callers must pass valid ids.
+std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
+ MutexLock lock1(&mutex_);
+ auto it = captured_frames_in_flight_.find(frame_id);
+ if (it != captured_frames_in_flight_.end()) {
+ return streams_.name(it->second.stream());
+ }
+ for (auto hist_it = stream_to_frame_id_history_.begin();
+ hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
+ auto hist_set_it = hist_it->second.find(frame_id);
+ if (hist_set_it != hist_it->second.end()) {
+ return streams_.name(hist_it->first);
+ }
+ }
+ RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
+}
+
+// Returns the set of public (stream, receiver) keys for every stream/receiver
+// pair the frames comparator has stats for, logging each internal->public
+// key mapping at INFO level for debugging.
+std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
+ MutexLock lock(&mutex_);
+ std::set<StatsKey> out;
+ for (auto& item : frames_comparator_.stream_stats()) {
+ RTC_LOG(LS_INFO) << item.first.ToString() << " ==> "
+ << ToStatsKey(item.first).ToString();
+ out.insert(ToStatsKey(item.first));
+ }
+ return out;
+}
+
+// Builds a VideoStreamsInfo snapshot describing the call topology known to
+// the comparator: stream -> sender, sender -> set of streams, and
+// stream -> set of receivers. Each mapping is also logged at INFO level.
+VideoStreamsInfo DefaultVideoQualityAnalyzer::GetKnownStreams() const {
+ MutexLock lock(&mutex_);
+ std::map<std::string, std::string> stream_to_sender;
+ std::map<std::string, std::set<std::string>> sender_to_streams;
+ std::map<std::string, std::set<std::string>> stream_to_receivers;
+
+ for (auto& item : frames_comparator_.stream_stats()) {
+ const std::string& stream_label = streams_.name(item.first.stream);
+ const std::string& sender = peers_->name(item.first.sender);
+ const std::string& receiver = peers_->name(item.first.receiver);
+ RTC_LOG(LS_INFO) << item.first.ToString() << " ==> "
+ << "stream=" << stream_label << "; sender=" << sender
+ << "; receiver=" << receiver;
+ stream_to_sender.emplace(stream_label, sender);
+ // Append the stream to the sender's set, creating the set on first use.
+ auto streams_it = sender_to_streams.find(sender);
+ if (streams_it != sender_to_streams.end()) {
+ streams_it->second.emplace(stream_label);
+ } else {
+ sender_to_streams.emplace(sender, std::set<std::string>{stream_label});
+ }
+ // Same pattern for the stream's receiver set.
+ auto receivers_it = stream_to_receivers.find(stream_label);
+ if (receivers_it != stream_to_receivers.end()) {
+ receivers_it->second.emplace(receiver);
+ } else {
+ stream_to_receivers.emplace(stream_label,
+ std::set<std::string>{receiver});
+ }
+ }
+
+ return VideoStreamsInfo(std::move(stream_to_sender),
+ std::move(sender_to_streams),
+ std::move(stream_to_receivers));
+}
+
+// Returns a copy of the call-wide frame counters (captured/encoded/decoded/
+// rendered/dropped/...), taken under the analyzer lock.
+FrameCounters DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
+ MutexLock lock(&mutex_);
+ return frame_counters_;
+}
+
+// Returns, per receiving peer name, the counters for frames that arrived
+// with an unset frame id and therefore could not be attributed to a sender.
+std::map<std::string, FrameCounters>
+DefaultVideoQualityAnalyzer::GetUnknownSenderFrameCounters() const {
+ MutexLock lock(&mutex_);
+ return unknown_sender_frame_counters_;
+}
+
+// Returns per-stream frame counters re-keyed from internal (index-based)
+// keys to the public StatsKey representation.
+std::map<StatsKey, FrameCounters>
+DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
+ MutexLock lock(&mutex_);
+ std::map<StatsKey, FrameCounters> out;
+ for (auto& item : stream_frame_counters_) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
+}
+
+// Returns the comparator's per-stream quality stats re-keyed to the public
+// StatsKey representation.
+std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
+ MutexLock lock1(&mutex_);
+ std::map<StatsKey, StreamStats> out;
+ for (auto& item : frames_comparator_.stream_stats()) {
+ out.emplace(ToStatsKey(item.first), item.second);
+ }
+ return out;
+}
+
+// Returns a copy of the analyzer's own infrastructure stats (queue sizes,
+// comparison counts, per-callback processing times).
+AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
+ MutexLock lock(&mutex_);
+ return analyzer_stats_;
+}
+
+// Hands out the next 16-bit frame id. On wraparound the counter skips
+// VideoFrame::kNotSetId (restarting at 1) so an assigned id is never
+// confused with "id not set".
+uint16_t DefaultVideoQualityAnalyzer::GetNextFrameId() {
+ uint16_t frame_id = next_frame_id_++;
+ if (next_frame_id_ == VideoFrame::kNotSetId) {
+ next_frame_id_ = 1;
+ }
+ return frame_id;
+}
+
+// Drains `stream_state`'s frame queue for `peer_index` and submits each still
+// in-flight frame to the comparator as kFrameInFlight (neither rendered nor
+// dropped). Used when a peer leaves the call and when the analyzer stops.
+// Must be called with `mutex_` held (private helper; callers all hold it).
+void DefaultVideoQualityAnalyzer::
+ AddExistingFramesInFlightForStreamToComparator(size_t stream_index,
+ StreamState& stream_state,
+ size_t peer_index) {
+ InternalStatsKey stats_key(stream_index, stream_state.sender(), peer_index);
+
+ // Add frames in flight for this stream into frames comparator.
+ // Frames in flight were not rendered, so they won't affect stream's
+ // last rendered frame time.
+ while (!stream_state.IsEmpty(peer_index)) {
+ uint16_t frame_id = stream_state.PopFront(peer_index);
+ auto it = captured_frames_in_flight_.find(frame_id);
+ RTC_DCHECK(it != captured_frames_in_flight_.end());
+ FrameInFlight& frame = it->second;
+
+ frames_comparator_.AddComparison(stats_key, /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight,
+ frame.GetStatsForPeer(peer_index));
+ }
+}
+
+// Emits all final metrics: per-stream quality results (via the keyed
+// ReportResults overload), global CPU usage, frame-counter logs, and — when
+// `options_.report_infra_metrics` is set — the analyzer's own processing-time
+// and comparison metrics. Called once from Stop(), after the comparator has
+// finished.
+void DefaultVideoQualityAnalyzer::ReportResults() {
+ MutexLock lock(&mutex_);
+ for (auto& item : frames_comparator_.stream_stats()) {
+ ReportResults(item.first, item.second,
+ stream_frame_counters_.at(item.first));
+ }
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ metrics_logger_->LogSingleValueMetric(
+ "cpu_usage_%", test_label_, GetCpuUsagePercent(), Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter,
+ {{MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}});
+ LogFrameCounters("Global", frame_counters_);
+ if (!unknown_sender_frame_counters_.empty()) {
+ RTC_LOG(LS_INFO) << "Received frame counters with unknown frame id:";
+ for (const auto& [peer_name, frame_counters] :
+ unknown_sender_frame_counters_) {
+ LogFrameCounters(peer_name, frame_counters);
+ }
+ }
+ RTC_LOG(LS_INFO) << "Received frame counters per stream:";
+ for (const auto& [stats_key, stream_stats] :
+ frames_comparator_.stream_stats()) {
+ LogFrameCounters(ToStatsKey(stats_key).ToString(),
+ stream_frame_counters_.at(stats_key));
+ LogStreamInternalStats(ToStatsKey(stats_key).ToString(), stream_stats,
+ start_time_);
+ }
+ // Summarize the comparison queue behavior for debugging overload issues.
+ if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
+ RTC_LOG(LS_INFO) << "comparisons_queue_size min="
+ << analyzer_stats_.comparisons_queue_size.GetMin()
+ << "; max="
+ << analyzer_stats_.comparisons_queue_size.GetMax()
+ << "; 99%="
+ << analyzer_stats_.comparisons_queue_size.GetPercentile(
+ 0.99);
+ }
+ RTC_LOG(LS_INFO) << "comparisons_done=" << analyzer_stats_.comparisons_done;
+ RTC_LOG(LS_INFO) << "cpu_overloaded_comparisons_done="
+ << analyzer_stats_.cpu_overloaded_comparisons_done;
+ RTC_LOG(LS_INFO) << "memory_overloaded_comparisons_done="
+ << analyzer_stats_.memory_overloaded_comparisons_done;
+ if (options_.report_infra_metrics) {
+ metrics_logger_->LogMetric("comparisons_queue_size", test_label_,
+ analyzer_stats_.comparisons_queue_size,
+ Unit::kCount,
+ ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric("frames_in_flight_left_count", test_label_,
+ analyzer_stats_.frames_in_flight_left_count,
+ Unit::kCount,
+ ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogSingleValueMetric(
+ "comparisons_done", test_label_, analyzer_stats_.comparisons_done,
+ Unit::kCount, ImprovementDirection::kNeitherIsBetter);
+ metrics_logger_->LogSingleValueMetric(
+ "cpu_overloaded_comparisons_done", test_label_,
+ analyzer_stats_.cpu_overloaded_comparisons_done, Unit::kCount,
+ ImprovementDirection::kNeitherIsBetter);
+ metrics_logger_->LogSingleValueMetric(
+ "memory_overloaded_comparisons_done", test_label_,
+ analyzer_stats_.memory_overloaded_comparisons_done, Unit::kCount,
+ ImprovementDirection::kNeitherIsBetter);
+ metrics_logger_->LogSingleValueMetric(
+ "test_duration", test_label_, (Now() - start_time_).ms(),
+ Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter);
+
+ metrics_logger_->LogMetric(
+ "on_frame_captured_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_captured_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_frame_pre_encode_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_pre_encode_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_frame_encoded_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_encoded_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_frame_pre_decode_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_pre_decode_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_frame_decoded_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_decoded_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_frame_rendered_processing_time_ms", test_label_,
+ analyzer_stats_.on_frame_rendered_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ metrics_logger_->LogMetric(
+ "on_decoder_error_processing_time_ms", test_label_,
+ analyzer_stats_.on_decoder_error_processing_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
+ }
+}
+
+// Emits the full metric set for one (stream, sender, receiver) tuple:
+// quality (PSNR/SSIM), latency, framerates (including the harmonic framerate
+// derived from inter-frame delays), frame counters and bitrates. Detailed
+// per-frame-type metrics are added when
+// `options_.report_detailed_frame_stats` is set. Caller holds `mutex_`
+// (invoked only from the lock-holding ReportResults()).
+void DefaultVideoQualityAnalyzer::ReportResults(
+ const InternalStatsKey& key,
+ const StreamStats& stats,
+ const FrameCounters& frame_counters) {
+ TimeDelta test_duration = Now() - start_time_;
+ std::string test_case_name = GetTestCaseName(ToMetricName(key));
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ std::map<std::string, std::string> metric_metadata{
+ {MetricMetadataKey::kPeerMetadataKey, peers_->name(key.sender)},
+ {MetricMetadataKey::kVideoStreamMetadataKey, streams_.name(key.stream)},
+ {MetricMetadataKey::kSenderMetadataKey, peers_->name(key.sender)},
+ {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}};
+
+ double sum_squared_interframe_delays_secs = 0;
+ Timestamp video_start_time = Timestamp::PlusInfinity();
+ Timestamp video_end_time = Timestamp::MinusInfinity();
+ for (const SamplesStatsCounter::StatsSample& sample :
+ stats.time_between_rendered_frames_ms.GetTimedSamples()) {
+ double interframe_delay_ms = sample.value;
+ const double interframe_delays_secs = interframe_delay_ms / 1000.0;
+ // Sum of squared inter frame intervals is used to calculate the harmonic
+ // frame rate metric. The metric aims to reflect overall experience related
+ // to smoothness of video playback and includes both freezes and pauses.
+ sum_squared_interframe_delays_secs +=
+ interframe_delays_secs * interframe_delays_secs;
+ if (sample.time < video_start_time) {
+ video_start_time = sample.time;
+ }
+ if (sample.time > video_end_time) {
+ video_end_time = sample.time;
+ }
+ }
+ // harmonic fps = duration / sum(delay_i^2); stays 0 when there were no
+ // rendered-frame intervals at all.
+ double harmonic_framerate_fps = 0;
+ TimeDelta video_duration = video_end_time - video_start_time;
+ if (sum_squared_interframe_delays_secs > 0.0 && video_duration.IsFinite()) {
+ harmonic_framerate_fps =
+ video_duration.seconds<double>() / sum_squared_interframe_delays_secs;
+ }
+
+ metrics_logger_->LogMetric(
+ "psnr_dB", test_case_name, stats.psnr, Unit::kUnitless,
+ ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "ssim", test_case_name, stats.ssim, Unit::kUnitless,
+ ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric("transport_time", test_case_name,
+ stats.transport_time_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric(
+ "total_delay_incl_transport", test_case_name,
+ stats.total_delay_incl_transport_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "time_between_rendered_frames", test_case_name,
+ stats.time_between_rendered_frames_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "harmonic_framerate", test_case_name, harmonic_framerate_fps,
+ Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "encode_frame_rate", test_case_name,
+ stats.encode_frame_rate.IsEmpty()
+ ? 0
+ : stats.encode_frame_rate.GetEventsPerSecond(),
+ Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "encode_time", test_case_name, stats.encode_time_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric("time_between_freezes", test_case_name,
+ stats.time_between_freezes_ms, Unit::kMilliseconds,
+ ImprovementDirection::kBiggerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric("freeze_time_ms", test_case_name,
+ stats.freeze_time_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric(
+ "pixels_per_frame", test_case_name, stats.resolution_of_decoded_frame,
+ Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "min_psnr_dB", test_case_name,
+ stats.psnr.IsEmpty() ? 0 : stats.psnr.GetMin(), Unit::kUnitless,
+ ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "decode_time", test_case_name, stats.decode_time_ms, Unit::kMilliseconds,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "receive_to_render_time", test_case_name, stats.receive_to_render_time_ms,
+ Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "dropped_frames", test_case_name, frame_counters.dropped, Unit::kCount,
+ ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ // Frames neither rendered nor dropped at stop time were still in flight.
+ metrics_logger_->LogSingleValueMetric(
+ "frames_in_flight", test_case_name,
+ frame_counters.captured - frame_counters.rendered -
+ frame_counters.dropped,
+ Unit::kCount, ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "rendered_frames", test_case_name, frame_counters.rendered, Unit::kCount,
+ ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "max_skipped", test_case_name, stats.skipped_between_rendered,
+ Unit::kCount, ImprovementDirection::kSmallerIsBetter, metric_metadata);
+ metrics_logger_->LogMetric(
+ "target_encode_bitrate", test_case_name,
+ stats.target_encode_bitrate / 1000, Unit::kKilobitsPerSecond,
+ ImprovementDirection::kNeitherIsBetter, metric_metadata);
+ // One QP metric per spatial layer, with the layer id in the metadata.
+ for (const auto& [spatial_layer, qp] : stats.spatial_layers_qp) {
+ std::map<std::string, std::string> qp_metadata = metric_metadata;
+ qp_metadata[MetricMetadataKey::kSpatialLayerMetadataKey] =
+ std::to_string(spatial_layer);
+ metrics_logger_->LogMetric("qp_sl" + std::to_string(spatial_layer),
+ test_case_name, qp, Unit::kUnitless,
+ ImprovementDirection::kSmallerIsBetter,
+ std::move(qp_metadata));
+ }
+ // Actual bitrate = total encoded payload bytes over the whole test run.
+ metrics_logger_->LogSingleValueMetric(
+ "actual_encode_bitrate", test_case_name,
+ static_cast<double>(stats.total_encoded_images_payload) /
+ test_duration.seconds<double>() * kBitsInByte / 1000,
+ Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+ metric_metadata);
+
+ if (options_.report_detailed_frame_stats) {
+ metrics_logger_->LogSingleValueMetric(
+ "capture_frame_rate", test_case_name,
+ stats.capture_frame_rate.IsEmpty()
+ ? 0
+ : stats.capture_frame_rate.GetEventsPerSecond(),
+ Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "num_encoded_frames", test_case_name, frame_counters.encoded,
+ Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "num_decoded_frames", test_case_name, frame_counters.decoded,
+ Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "num_send_key_frames", test_case_name, stats.num_send_key_frames,
+ Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+ metrics_logger_->LogSingleValueMetric(
+ "num_recv_key_frames", test_case_name, stats.num_recv_key_frames,
+ Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
+
+ metrics_logger_->LogMetric("recv_key_frame_size_bytes", test_case_name,
+ stats.recv_key_frame_size_bytes, Unit::kCount,
+ ImprovementDirection::kBiggerIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric("recv_delta_frame_size_bytes", test_case_name,
+ stats.recv_delta_frame_size_bytes, Unit::kCount,
+ ImprovementDirection::kBiggerIsBetter,
+ metric_metadata);
+ }
+}
+
+// Builds the metric test-case name as "<test label>/<stream label>".
+std::string DefaultVideoQualityAnalyzer::GetTestCaseName(
+ const std::string& stream_label) const {
+ return test_label_ + "/" + stream_label;
+}
+
+// Single source of current time for the analyzer; reads the injected clock.
+Timestamp DefaultVideoQualityAnalyzer::Now() {
+ return clock_->CurrentTime();
+}
+
+// Converts an internal index-based key to the public StatsKey. Note the
+// public key carries only stream name and receiver name — the sender is
+// apparently implied by the stream (each stream has one sender); confirm
+// against StatsKey's definition if extending.
+StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
+ const InternalStatsKey& key) const {
+ return StatsKey(streams_.name(key.stream), peers_->name(key.receiver));
+}
+
+// Builds the metric name for a stream/sender/receiver tuple. For the common
+// two-peer case (excluding loopback) only the stream label is used to keep
+// historical metric names stable; otherwise sender and receiver names are
+// appended.
+std::string DefaultVideoQualityAnalyzer::ToMetricName(
+ const InternalStatsKey& key) const {
+ const std::string& stream_label = streams_.name(key.stream);
+ if (peers_->GetKnownSize() <= 2 && key.sender != key.receiver) {
+ // TODO(titovartem): remove this special case.
+ return stream_label;
+ }
+ rtc::StringBuilder out;
+ out << stream_label << "_" << peers_->name(key.sender) << "_"
+ << peers_->name(key.receiver);
+ return out.str();
+}
+
+// Delegates to the CPU measurer for the process CPU usage over the test run.
+double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
+ return cpu_measurer_.GetCpuUsagePercent();
+}
+
+// Returns, per stream label, the full ordered history of frame ids captured
+// for that stream.
+// NOTE(review): the range-for iterates by value, copying each map pair
+// (including the whole id vector) per iteration — `const auto&` would avoid
+// the copies; left as-is here since this is patch content.
+std::map<std::string, std::vector<uint16_t>>
+DefaultVideoQualityAnalyzer::GetStreamFrames() const {
+ MutexLock lock(&mutex_);
+ std::map<std::string, std::vector<uint16_t>> out;
+ for (auto entry_it : stream_to_frame_id_full_history_) {
+ out.insert({streams_.name(entry_it.first), entry_it.second});
+ }
+ return out;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
new file mode 100644
index 0000000000..b67e5a0147
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h
@@ -0,0 +1,197 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_
+
+#include <atomic>
+#include <cstdint>
+#include <deque>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
+#include "test/pc/e2e/analyzer/video/names_collection.h"
+
+namespace webrtc {
+
+class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
+ public:
+ DefaultVideoQualityAnalyzer(webrtc::Clock* clock,
+ test::MetricsLogger* metrics_logger,
+ DefaultVideoQualityAnalyzerOptions options = {});
+ ~DefaultVideoQualityAnalyzer() override;
+
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
+ const VideoFrame& frame) override;
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& encoded_image,
+ const EncoderStats& stats,
+ bool discarded) override;
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& input_image) override;
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
+ const DecoderStats& stats) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code,
+ const DecoderStats& stats) override;
+
+ void RegisterParticipantInCall(absl::string_view peer_name) override;
+ void UnregisterParticipantInCall(absl::string_view peer_name) override;
+
+ void Stop() override;
+ std::string GetStreamLabel(uint16_t frame_id) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
+
+  // Returns set of stream labels that were met during the test call.
+ std::set<StatsKey> GetKnownVideoStreams() const;
+ VideoStreamsInfo GetKnownStreams() const;
+ FrameCounters GetGlobalCounters() const;
+ // Returns frame counter for frames received without frame id set.
+ std::map<std::string, FrameCounters> GetUnknownSenderFrameCounters() const;
+ // Returns frame counter per stream label. Valid stream labels can be obtained
+ // by calling GetKnownVideoStreams()
+ std::map<StatsKey, FrameCounters> GetPerStreamCounters() const;
+ // Returns video quality stats per stream label. Valid stream labels can be
+ // obtained by calling GetKnownVideoStreams()
+ std::map<StatsKey, StreamStats> GetStats() const;
+ AnalyzerStats GetAnalyzerStats() const;
+ double GetCpuUsagePercent();
+
+ // Returns mapping from the stream label to the history of frames that were
+  // met in this stream in the order they were captured.
+ std::map<std::string, std::vector<uint16_t>> GetStreamFrames() const;
+
+ private:
+ enum State { kNew, kActive, kStopped };
+
+ // Returns next frame id to use. Frame ID can't be `VideoFrame::kNotSetId`,
+ // because this value is reserved by `VideoFrame` as "ID not set".
+ uint16_t GetNextFrameId() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ void AddExistingFramesInFlightForStreamToComparator(size_t stream_index,
+ StreamState& stream_state,
+ size_t peer_index)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ // Report results for all metrics for all streams.
+ void ReportResults();
+ void ReportResults(const InternalStatsKey& key,
+ const StreamStats& stats,
+ const FrameCounters& frame_counters)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Returns name of current test case for reporting.
+ std::string GetTestCaseName(const std::string& stream_label) const;
+ Timestamp Now();
+ StatsKey ToStatsKey(const InternalStatsKey& key) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Returns string representation of stats key for metrics naming. Used for
+  // backward compatibility of metrics naming in 2-peer cases.
+ std::string ToMetricName(const InternalStatsKey& key) const
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ static const uint16_t kStartingFrameId = 1;
+
+ const DefaultVideoQualityAnalyzerOptions options_;
+ webrtc::Clock* const clock_;
+ test::MetricsLogger* const metrics_logger_;
+
+ std::string test_label_;
+
+ mutable Mutex mutex_;
+ uint16_t next_frame_id_ RTC_GUARDED_BY(mutex_) = kStartingFrameId;
+ std::unique_ptr<NamesCollection> peers_ RTC_GUARDED_BY(mutex_);
+ State state_ RTC_GUARDED_BY(mutex_) = State::kNew;
+ Timestamp start_time_ RTC_GUARDED_BY(mutex_) = Timestamp::MinusInfinity();
+ // Mapping from stream label to unique size_t value to use in stats and avoid
+ // extra string copying.
+ NamesCollection streams_ RTC_GUARDED_BY(mutex_);
+ // Frames that were captured by all streams and still aren't rendered on
+ // receivers or deemed dropped. Frame with id X can be removed from this map
+ // if:
+ // 1. The frame with id X was received in OnFrameRendered by all expected
+ // receivers.
+ // 2. The frame with id Y > X was received in OnFrameRendered by all expected
+ // receivers.
+ // 3. Next available frame id for newly captured frame is X
+  //    4. There are too many frames in flight for the current video stream
+  //       and X is the oldest frame id in this stream. In such a case only
+  //       the frame content will be removed, but the map entry is preserved.
+ std::map<uint16_t, FrameInFlight> captured_frames_in_flight_
+ RTC_GUARDED_BY(mutex_);
+ // Global frames count for all video streams.
+ FrameCounters frame_counters_ RTC_GUARDED_BY(mutex_);
+ // Frame counters for received frames without video frame id set.
+ // Map from peer name to the frame counters.
+ std::map<std::string, FrameCounters> unknown_sender_frame_counters_
+ RTC_GUARDED_BY(mutex_);
+ // Frame counters per each stream per each receiver.
+ std::map<InternalStatsKey, FrameCounters> stream_frame_counters_
+ RTC_GUARDED_BY(mutex_);
+ // Map from stream index in `streams_` to its StreamState.
+ std::map<size_t, StreamState> stream_states_ RTC_GUARDED_BY(mutex_);
+ // Map from stream index in `streams_` to sender peer index in `peers_`.
+ std::map<size_t, size_t> stream_to_sender_ RTC_GUARDED_BY(mutex_);
+
+ // Stores history mapping between stream index in `streams_` and frame ids.
+  // Updated when frame ids overlap. It is required to properly return the
+  // stream label after the 1st frame from simulcast streams was already
+  // rendered while the last one is still encoding.
+ std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
+ RTC_GUARDED_BY(mutex_);
+ // Map from stream index to the list of frames as they were met in the stream.
+ std::map<size_t, std::vector<uint16_t>> stream_to_frame_id_full_history_
+ RTC_GUARDED_BY(mutex_);
+ AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(mutex_);
+
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer_;
+ DefaultVideoQualityAnalyzerFramesComparator frames_comparator_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc
new file mode 100644
index 0000000000..847c9f09a6
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h"
+
+#include "rtc_base/cpu_time.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system_time.h"
+
+namespace webrtc {
+
+void DefaultVideoQualityAnalyzerCpuMeasurer::StartMeasuringCpuProcessTime() {
+ MutexLock lock(&mutex_);
+ cpu_time_ -= rtc::GetProcessCpuTimeNanos();
+ wallclock_time_ -= rtc::SystemTimeNanos();
+}
+
+void DefaultVideoQualityAnalyzerCpuMeasurer::StopMeasuringCpuProcessTime() {
+ MutexLock lock(&mutex_);
+ cpu_time_ += rtc::GetProcessCpuTimeNanos();
+ wallclock_time_ += rtc::SystemTimeNanos();
+}
+
+void DefaultVideoQualityAnalyzerCpuMeasurer::StartExcludingCpuThreadTime() {
+ MutexLock lock(&mutex_);
+ cpu_time_ += rtc::GetThreadCpuTimeNanos();
+}
+
+void DefaultVideoQualityAnalyzerCpuMeasurer::StopExcludingCpuThreadTime() {
+ MutexLock lock(&mutex_);
+ cpu_time_ -= rtc::GetThreadCpuTimeNanos();
+}
+
+double DefaultVideoQualityAnalyzerCpuMeasurer::GetCpuUsagePercent() {
+ MutexLock lock(&mutex_);
+ return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h
new file mode 100644
index 0000000000..dd9fa07af2
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_CPU_MEASURER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_CPU_MEASURER_H_
+
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// This class is thread safe.
+class DefaultVideoQualityAnalyzerCpuMeasurer {
+ public:
+ double GetCpuUsagePercent();
+
+ void StartMeasuringCpuProcessTime();
+ void StopMeasuringCpuProcessTime();
+ void StartExcludingCpuThreadTime();
+ void StopExcludingCpuThreadTime();
+
+ private:
+ Mutex mutex_;
+ int64_t cpu_time_ RTC_GUARDED_BY(mutex_) = 0;
+ int64_t wallclock_time_ RTC_GUARDED_BY(mutex_) = 0;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_CPU_MEASURER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc
new file mode 100644
index 0000000000..df34dadaf0
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc
@@ -0,0 +1,209 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
+
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_type.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+
+namespace webrtc {
+namespace {
+
+template <typename T>
+absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
+ auto it = map.find(key);
+ if (it == map.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
+} // namespace
+
+FrameInFlight::FrameInFlight(size_t stream,
+ VideoFrame frame,
+ Timestamp captured_time,
+ std::set<size_t> expected_receivers)
+ : stream_(stream),
+ expected_receivers_(std::move(expected_receivers)),
+ frame_(std::move(frame)),
+ captured_time_(captured_time) {}
+
+bool FrameInFlight::RemoveFrame() {
+ if (!frame_) {
+ return false;
+ }
+ frame_ = absl::nullopt;
+ return true;
+}
+
+void FrameInFlight::SetFrameId(uint16_t id) {
+ if (frame_) {
+ frame_->set_id(id);
+ }
+ frame_id_ = id;
+}
+
+std::vector<size_t> FrameInFlight::GetPeersWhichDidntReceive() const {
+ std::vector<size_t> out;
+ for (size_t peer : expected_receivers_) {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end() ||
+ (!it->second.dropped && it->second.rendered_time.IsInfinite())) {
+ out.push_back(peer);
+ }
+ }
+ return out;
+}
+
+bool FrameInFlight::HaveAllPeersReceived() const {
+ for (size_t peer : expected_receivers_) {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+
+ if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void FrameInFlight::OnFrameEncoded(webrtc::Timestamp time,
+ VideoFrameType frame_type,
+ DataSize encoded_image_size,
+ uint32_t target_encode_bitrate,
+ int spatial_layer,
+ int qp,
+ StreamCodecInfo used_encoder) {
+ encoded_time_ = time;
+ frame_type_ = frame_type;
+ encoded_image_size_ = encoded_image_size;
+ target_encode_bitrate_ += target_encode_bitrate;
+ spatial_layers_qp_[spatial_layer].AddSample(SamplesStatsCounter::StatsSample{
+ .value = static_cast<double>(qp), .time = time});
+ // Update used encoder info. If simulcast/SVC is used, this method can
+ // be called multiple times, in such case we should preserve the value
+ // of `used_encoder_.switched_on_at` from the first invocation as the
+ // smallest one.
+ Timestamp encoder_switched_on_at = used_encoder_.has_value()
+ ? used_encoder_->switched_on_at
+ : Timestamp::PlusInfinity();
+ RTC_DCHECK(used_encoder.switched_on_at.IsFinite());
+ RTC_DCHECK(used_encoder.switched_from_at.IsFinite());
+ used_encoder_ = used_encoder;
+ if (encoder_switched_on_at < used_encoder_->switched_on_at) {
+ used_encoder_->switched_on_at = encoder_switched_on_at;
+ }
+}
+
+void FrameInFlight::OnFramePreDecode(size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time,
+ VideoFrameType frame_type,
+ DataSize encoded_image_size) {
+ receiver_stats_[peer].received_time = received_time;
+ receiver_stats_[peer].decode_start_time = decode_start_time;
+ receiver_stats_[peer].frame_type = frame_type;
+ receiver_stats_[peer].encoded_image_size = encoded_image_size;
+}
+
+bool FrameInFlight::HasReceivedTime(size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.received_time.IsFinite();
+}
+
+void FrameInFlight::OnFrameDecoded(size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height,
+ const StreamCodecInfo& used_decoder) {
+ receiver_stats_[peer].decode_end_time = time;
+ receiver_stats_[peer].used_decoder = used_decoder;
+ receiver_stats_[peer].decoded_frame_width = width;
+ receiver_stats_[peer].decoded_frame_height = height;
+}
+
+void FrameInFlight::OnDecoderError(size_t peer,
+ const StreamCodecInfo& used_decoder) {
+ receiver_stats_[peer].decoder_failed = true;
+ receiver_stats_[peer].used_decoder = used_decoder;
+}
+
+bool FrameInFlight::HasDecodeEndTime(size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.decode_end_time.IsFinite();
+}
+
+void FrameInFlight::OnFrameRendered(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].rendered_time = time;
+}
+
+bool FrameInFlight::HasRenderedTime(size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.rendered_time.IsFinite();
+}
+
+bool FrameInFlight::IsDropped(size_t peer) const {
+ auto it = receiver_stats_.find(peer);
+ if (it == receiver_stats_.end()) {
+ return false;
+ }
+ return it->second.dropped;
+}
+
+FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const {
+ RTC_DCHECK_NE(frame_id_, VideoFrame::kNotSetId)
+ << "Frame id isn't initialized";
+ FrameStats stats(frame_id_, captured_time_);
+ stats.pre_encode_time = pre_encode_time_;
+ stats.encoded_time = encoded_time_;
+ stats.target_encode_bitrate = target_encode_bitrate_;
+ stats.encoded_frame_type = frame_type_;
+ stats.encoded_image_size = encoded_image_size_;
+ stats.used_encoder = used_encoder_;
+ stats.spatial_layers_qp = spatial_layers_qp_;
+
+ absl::optional<ReceiverFrameStats> receiver_stats =
+ MaybeGetValue<ReceiverFrameStats>(receiver_stats_, peer);
+ if (receiver_stats.has_value()) {
+ stats.received_time = receiver_stats->received_time;
+ stats.decode_start_time = receiver_stats->decode_start_time;
+ stats.decode_end_time = receiver_stats->decode_end_time;
+ stats.rendered_time = receiver_stats->rendered_time;
+ stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
+ stats.decoded_frame_width = receiver_stats->decoded_frame_width;
+ stats.decoded_frame_height = receiver_stats->decoded_frame_height;
+ stats.used_decoder = receiver_stats->used_decoder;
+ stats.pre_decoded_frame_type = receiver_stats->frame_type;
+ stats.pre_decoded_image_size = receiver_stats->encoded_image_size;
+ stats.decoder_failed = receiver_stats->decoder_failed;
+ }
+ return stats;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h
new file mode 100644
index 0000000000..52a526d09b
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h
@@ -0,0 +1,169 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAME_IN_FLIGHT_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAME_IN_FLIGHT_H_
+
+#include <map>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_type.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+
+namespace webrtc {
+
+struct ReceiverFrameStats {
+ // Time when last packet of a frame was received.
+ Timestamp received_time = Timestamp::MinusInfinity();
+ Timestamp decode_start_time = Timestamp::MinusInfinity();
+ Timestamp decode_end_time = Timestamp::MinusInfinity();
+ Timestamp rendered_time = Timestamp::MinusInfinity();
+ Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+
+ // Type and encoded size of received frame.
+ VideoFrameType frame_type = VideoFrameType::kEmptyFrame;
+ DataSize encoded_image_size = DataSize::Bytes(0);
+
+ absl::optional<int> decoded_frame_width = absl::nullopt;
+ absl::optional<int> decoded_frame_height = absl::nullopt;
+
+ // Can be not set if frame was dropped in the network.
+ absl::optional<StreamCodecInfo> used_decoder = absl::nullopt;
+
+ bool dropped = false;
+ bool decoder_failed = false;
+};
+
+// Represents a frame which was sent by sender and is currently on the way to
+// multiple receivers. Some receivers may receive this frame and some don't.
+//
+// Contains all statistics associated with the frame and gathered in multiple
+// points of the video pipeline.
+//
+// Internally may store a copy of the source frame which was sent. In such
+// a case this frame is "alive".
+class FrameInFlight {
+ public:
+ FrameInFlight(size_t stream,
+ VideoFrame frame,
+ Timestamp captured_time,
+ std::set<size_t> expected_receivers);
+
+ size_t stream() const { return stream_; }
+ // Returns internal copy of source `VideoFrame` or `absl::nullopt` if it was
+ // removed before.
+ const absl::optional<VideoFrame>& frame() const { return frame_; }
+ // Removes internal copy of the source `VideoFrame` to free up extra memory.
+  // Returns whether the frame was removed or not.
+ bool RemoveFrame();
+ void SetFrameId(uint16_t id);
+
+ void AddExpectedReceiver(size_t peer) { expected_receivers_.insert(peer); }
+
+ void RemoveExpectedReceiver(size_t peer) { expected_receivers_.erase(peer); }
+
+ std::vector<size_t> GetPeersWhichDidntReceive() const;
+
+  // Returns whether all peers which were expected to receive this frame
+  // actually received it or not.
+ bool HaveAllPeersReceived() const;
+
+ void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }
+
+ void OnFrameEncoded(webrtc::Timestamp time,
+ VideoFrameType frame_type,
+ DataSize encoded_image_size,
+ uint32_t target_encode_bitrate,
+ int spatial_layer,
+ int qp,
+ StreamCodecInfo used_encoder);
+
+ bool HasEncodedTime() const { return encoded_time_.IsFinite(); }
+
+ void OnFramePreDecode(size_t peer,
+ webrtc::Timestamp received_time,
+ webrtc::Timestamp decode_start_time,
+ VideoFrameType frame_type,
+ DataSize encoded_image_size);
+
+ bool HasReceivedTime(size_t peer) const;
+
+ void OnFrameDecoded(size_t peer,
+ webrtc::Timestamp time,
+ int width,
+ int height,
+ const StreamCodecInfo& used_decoder);
+ void OnDecoderError(size_t peer, const StreamCodecInfo& used_decoder);
+
+ bool HasDecodeEndTime(size_t peer) const;
+
+ void OnFrameRendered(size_t peer, webrtc::Timestamp time);
+
+ bool HasRenderedTime(size_t peer) const;
+
+ // Crash if rendered time is not set for specified `peer`.
+ webrtc::Timestamp rendered_time(size_t peer) const {
+ return receiver_stats_.at(peer).rendered_time;
+ }
+
+ // Marks that frame was dropped and wasn't seen by particular `peer`.
+ void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }
+ bool IsDropped(size_t peer) const;
+
+ void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
+ receiver_stats_[peer].prev_frame_rendered_time = time;
+ }
+
+ FrameStats GetStatsForPeer(size_t peer) const;
+
+ private:
+ const size_t stream_;
+ // Set of peer's indexes who are expected to receive this frame. This is not
+ // the set of peer's indexes that received the frame. For example, if peer A
+ // was among expected receivers, it received frame and then left the call, A
+ // will be removed from this set, but the Stats for peer A still will be
+ // preserved in the FrameInFlight.
+ //
+ // This set is used to determine if this frame is expected to be received by
+ // any peer or can be safely deleted. It is responsibility of the user of this
+ // object to decide when it should be deleted.
+ std::set<size_t> expected_receivers_;
+ absl::optional<VideoFrame> frame_;
+ // Store frame id separately because `frame_` can be removed when we have too
+  // much memory consumption.
+ uint16_t frame_id_ = VideoFrame::kNotSetId;
+
+ // Frame events timestamp.
+ Timestamp captured_time_;
+ Timestamp pre_encode_time_ = Timestamp::MinusInfinity();
+ Timestamp encoded_time_ = Timestamp::MinusInfinity();
+ // Type and encoded size of sent frame.
+ VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame;
+ DataSize encoded_image_size_ = DataSize::Bytes(0);
+ uint32_t target_encode_bitrate_ = 0;
+ // Sender side qp values per spatial layer. In case when spatial layer is not
+ // set for `webrtc::EncodedImage`, 0 is used as default.
+ std::map<int, SamplesStatsCounter> spatial_layers_qp_;
+ // Can be not set if frame was dropped by encoder.
+ absl::optional<StreamCodecInfo> used_encoder_ = absl::nullopt;
+ // Map from the receiver peer's index to frame stats for that peer.
+ std::map<size_t, ReceiverFrameStats> receiver_stats_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAME_IN_FLIGHT_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc
new file mode 100644
index 0000000000..cbc0b7e8f3
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc
@@ -0,0 +1,575 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
+
+#include <algorithm>
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_type.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace {
+
+using ::webrtc::webrtc_pc_e2e::SampleMetadataKey;
+
+constexpr TimeDelta kFreezeThreshold = TimeDelta::Millis(150);
+constexpr int kMaxActiveComparisons = 10;
+
+SamplesStatsCounter::StatsSample StatsSample(
+ double value,
+ Timestamp sampling_time,
+ std::map<std::string, std::string> metadata) {
+ return SamplesStatsCounter::StatsSample{value, sampling_time,
+ std::move(metadata)};
+}
+
+SamplesStatsCounter::StatsSample StatsSample(
+ TimeDelta duration,
+ Timestamp sampling_time,
+ std::map<std::string, std::string> metadata) {
+ return SamplesStatsCounter::StatsSample{duration.ms<double>(), sampling_time,
+ std::move(metadata)};
+}
+
+FrameComparison ValidateFrameComparison(FrameComparison comparison) {
+ RTC_DCHECK(comparison.frame_stats.captured_time.IsFinite())
+ << "Any comparison has to have finite captured_time";
+ switch (comparison.type) {
+ case FrameComparisonType::kRegular:
+ // Regular comparison has to have all FrameStats filled in.
+ RTC_DCHECK(comparison.captured.has_value() ||
+ comparison.overload_reason != OverloadReason::kNone)
+ << "Regular comparison has to have captured frame if it's not "
+ << "overloaded comparison";
+ RTC_DCHECK(comparison.rendered.has_value() ||
+ comparison.overload_reason != OverloadReason::kNone)
+ << "rendered frame has to be presented if it's not overloaded "
+ << "comparison";
+ RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
+ << "Regular comparison has to have finite pre_encode_time";
+ RTC_DCHECK(comparison.frame_stats.encoded_time.IsFinite())
+ << "Regular comparison has to have finite encoded_time";
+ RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
+ << "Regular comparison has to have finite received_time";
+ RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
+ << "Regular comparison has to have finite decode_start_time";
+ RTC_DCHECK(comparison.frame_stats.decode_end_time.IsFinite())
+ << "Regular comparison has to have finite decode_end_time";
+ RTC_DCHECK(comparison.frame_stats.rendered_time.IsFinite())
+ << "Regular comparison has to have finite rendered_time";
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value())
+ << "Regular comparison has to have decoded_frame_width";
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value())
+ << "Regular comparison has to have decoded_frame_height";
+ RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
+ << "Regular comparison has to have used_encoder";
+ RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
+ << "Regular comparison has to have used_decoder";
+ RTC_DCHECK(!comparison.frame_stats.decoder_failed)
+ << "Regular comparison can't have decoder failure";
+ break;
+ case FrameComparisonType::kDroppedFrame:
+ // Frame can be dropped before encoder, by encoder, inside network or
+ // after decoder.
+ RTC_DCHECK(!comparison.captured.has_value())
+ << "Dropped frame comparison can't have captured frame";
+ RTC_DCHECK(!comparison.rendered.has_value())
+ << "Dropped frame comparison can't have rendered frame";
+
+ if (comparison.frame_stats.encoded_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
+ << "Dropped frame comparison has to have used_encoder when "
+ << "encoded_time is set";
+ RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
+ << "Dropped frame comparison has to have finite pre_encode_time "
+ << "when encoded_time is finite.";
+ }
+
+ if (comparison.frame_stats.decode_end_time.IsFinite() ||
+ comparison.frame_stats.decoder_failed) {
+ RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
+ << "Dropped frame comparison has to have received_time when "
+ << "decode_end_time is set or decoder_failed is true";
+ RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
+ << "Dropped frame comparison has to have decode_start_time when "
+ << "decode_end_time is set or decoder_failed is true";
+ RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
+ << "Dropped frame comparison has to have used_decoder when "
+ << "decode_end_time is set or decoder_failed is true";
+ } else if (comparison.frame_stats.decode_end_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value())
+ << "Dropped frame comparison has to have decoded_frame_width when "
+ << "decode_end_time is set";
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value())
+ << "Dropped frame comparison has to have decoded_frame_height when "
+ << "decode_end_time is set";
+ }
+ RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite())
+ << "Dropped frame comparison can't have rendered_time";
+ break;
+ case FrameComparisonType::kFrameInFlight:
+ // Frame in flight comparison may miss almost any FrameStats, but if
+ // stats for stage X are set, then stats for stage X - 1 also has to be
+ // set. Also these frames were never rendered.
+ RTC_DCHECK(!comparison.captured.has_value())
+ << "Frame in flight comparison can't have captured frame";
+ RTC_DCHECK(!comparison.rendered.has_value())
+ << "Frame in flight comparison can't have rendered frame";
+ RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite())
+ << "Frame in flight comparison can't have rendered_time";
+
+ if (comparison.frame_stats.decode_end_time.IsFinite() ||
+ comparison.frame_stats.decoder_failed) {
+ RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
+ << "Frame in flight comparison has to have used_decoder when "
+ << "decode_end_time is set or decoder_failed is true.";
+ RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
+ << "Frame in flight comparison has to have finite "
+ << "decode_start_time when decode_end_time is finite or "
+ << "decoder_failed is true.";
+ }
+ if (comparison.frame_stats.decode_end_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_width.has_value())
+ << "Frame in flight comparison has to have decoded_frame_width "
+ << "when decode_end_time is set.";
+ RTC_DCHECK(comparison.frame_stats.decoded_frame_height.has_value())
+ << "Frame in flight comparison has to have decoded_frame_height "
+ << "when decode_end_time is set.";
+ }
+ if (comparison.frame_stats.decode_start_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
+ << "Frame in flight comparison has to have finite received_time "
+ << "when decode_start_time is finite.";
+ }
+ if (comparison.frame_stats.received_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.encoded_time.IsFinite())
+ << "Frame in flight comparison has to have finite encoded_time "
+ << "when received_time is finite.";
+ }
+ if (comparison.frame_stats.encoded_time.IsFinite()) {
+ RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
+ << "Frame in flight comparison has to have used_encoder when "
+ << "encoded_time is set";
+ RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
+ << "Frame in flight comparison has to have finite pre_encode_time "
+ << "when encoded_time is finite.";
+ }
+ break;
+ }
+ return comparison;
+}
+
+} // namespace
+
+void DefaultVideoQualityAnalyzerFramesComparator::Start(int max_threads_count) {
+ for (int i = 0; i < max_threads_count; i++) {
+ thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable(
+ [this] { ProcessComparisons(); },
+ "DefaultVideoQualityAnalyzerFramesComparator-" + std::to_string(i)));
+ }
+ {
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kNew) << "Frames comparator is already started";
+ state_ = State::kActive;
+ }
+ cpu_measurer_.StartMeasuringCpuProcessTime();
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::Stop(
+ const std::map<InternalStatsKey, Timestamp>& last_rendered_frame_times) {
+ {
+ MutexLock lock(&mutex_);
+ if (state_ == State::kStopped) {
+ return;
+ }
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "Frames comparator has to be started before it will be used";
+ state_ = State::kStopped;
+ }
+ cpu_measurer_.StopMeasuringCpuProcessTime();
+ comparison_available_event_.Set();
+ thread_pool_.clear();
+
+ {
+ MutexLock lock(&mutex_);
+ // Perform final Metrics update. On this place analyzer is stopped and no
+ // one holds any locks.
+
+ // Time between freezes.
+ // Count time since the last freeze to the end of the call as time
+ // between freezes.
+ for (auto& entry : last_rendered_frame_times) {
+ const InternalStatsKey& stats_key = entry.first;
+ const Timestamp& last_rendered_frame_time = entry.second;
+
+ // If there are no freezes in the call we have to report
+ // time_between_freezes_ms as call duration and in such case
+ // `last_rendered_frame_time` for this stream will be stream start time.
+ // If there is a freeze, then we need to add the time from the last
+ // rendered frame to the last freeze end as time between freezes.
+ stream_stats_.at(stats_key).time_between_freezes_ms.AddSample(StatsSample(
+ last_rendered_frame_time - stream_last_freeze_end_time_.at(stats_key),
+ Now(), /*metadata=*/{}));
+ }
+
+ // Freeze Time:
+ // If there were no freezes on a video stream, add only one sample with
+ // value 0 (0ms freezes time).
+ for (auto& [key, stream_stats] : stream_stats_) {
+ if (stream_stats.freeze_time_ms.IsEmpty()) {
+ stream_stats.freeze_time_ms.AddSample(0);
+ }
+ }
+ }
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::EnsureStatsForStream(
+ size_t stream_index,
+ size_t sender_peer_index,
+ size_t peers_count,
+ Timestamp captured_time,
+ Timestamp start_time) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "Frames comparator has to be started before it will be used";
+
+ for (size_t i = 0; i < peers_count; ++i) {
+ if (i == sender_peer_index && !options_.enable_receive_own_stream) {
+ continue;
+ }
+ InternalStatsKey stats_key(stream_index, sender_peer_index, i);
+ if (stream_stats_.find(stats_key) == stream_stats_.end()) {
+ stream_stats_.insert({stats_key, StreamStats(captured_time)});
+ // Assume that the first freeze was before first stream frame captured.
+ // This way time before the first freeze would be counted as time
+ // between freezes.
+ stream_last_freeze_end_time_.insert({stats_key, start_time});
+ } else {
+ // When we see some `stream_label` for the first time we need to create
+ // stream stats object for it and set up some states, but we need to do
+ // it only once and for all receivers, so on the next frame on the same
+ // `stream_label` we can be sure that it's already done and we don't
+ // need to scan through all peers again.
+ break;
+ }
+ }
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::RegisterParticipantInCall(
+ rtc::ArrayView<std::pair<InternalStatsKey, Timestamp>> stream_started_time,
+ Timestamp start_time) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "Frames comparator has to be started before it will be used";
+
+ for (const std::pair<InternalStatsKey, Timestamp>& pair :
+ stream_started_time) {
+ stream_stats_.insert({pair.first, StreamStats(pair.second)});
+ stream_last_freeze_end_time_.insert({pair.first, start_time});
+ }
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
+ InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "Frames comparator has to be started before it will be used";
+ AddComparisonInternal(std::move(stats_key), std::move(captured),
+ std::move(rendered), type, std::move(frame_stats));
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
+ InternalStatsKey stats_key,
+ int skipped_between_rendered,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats) {
+ MutexLock lock(&mutex_);
+ RTC_CHECK_EQ(state_, State::kActive)
+ << "Frames comparator has to be started before it will be used";
+ stream_stats_.at(stats_key).skipped_between_rendered.AddSample(
+ StatsSample(skipped_between_rendered, Now(),
+ /*metadata=*/
+ {{SampleMetadataKey::kFrameIdMetadataKey,
+ std::to_string(frame_stats.frame_id)}}));
+ AddComparisonInternal(std::move(stats_key), std::move(captured),
+ std::move(rendered), type, std::move(frame_stats));
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal(
+ InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats) {
+ cpu_measurer_.StartExcludingCpuThreadTime();
+ frames_comparator_stats_.comparisons_queue_size.AddSample(
+ StatsSample(comparisons_.size(), Now(), /*metadata=*/{}));
+ // If there are too many computations waiting in the queue, we won't
+ // provide the frames themselves to make future computations lighter.
+ if (comparisons_.size() >= kMaxActiveComparisons) {
+ comparisons_.emplace_back(ValidateFrameComparison(
+ FrameComparison(std::move(stats_key), /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt, type,
+ std::move(frame_stats), OverloadReason::kCpu)));
+ } else {
+ OverloadReason overload_reason = OverloadReason::kNone;
+ if (!captured && type == FrameComparisonType::kRegular) {
+ overload_reason = OverloadReason::kMemory;
+ }
+ comparisons_.emplace_back(ValidateFrameComparison(FrameComparison(
+ std::move(stats_key), std::move(captured), std::move(rendered), type,
+ std::move(frame_stats), overload_reason)));
+ }
+ comparison_available_event_.Set();
+ cpu_measurer_.StopExcludingCpuThreadTime();
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparisons() {
+ while (true) {
+ // Try to pick next comparison to perform from the queue.
+ absl::optional<FrameComparison> comparison = absl::nullopt;
+ bool more_new_comparisons_expected;
+ {
+ MutexLock lock(&mutex_);
+ if (!comparisons_.empty()) {
+ comparison = comparisons_.front();
+ comparisons_.pop_front();
+ if (!comparisons_.empty()) {
+ comparison_available_event_.Set();
+ }
+ }
+ // If state is stopped => no new frame comparisons are expected.
+ more_new_comparisons_expected = state_ != State::kStopped;
+ }
+ if (!comparison) {
+ if (!more_new_comparisons_expected) {
+ comparison_available_event_.Set();
+ return;
+ }
+ comparison_available_event_.Wait(TimeDelta::Seconds(1));
+ continue;
+ }
+
+ cpu_measurer_.StartExcludingCpuThreadTime();
+ ProcessComparison(comparison.value());
+ cpu_measurer_.StopExcludingCpuThreadTime();
+ }
+}
+
+void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
+ const FrameComparison& comparison) {
+ // Comparison is checked to be valid before adding, so we can use this
+ // assumptions during computations.
+
+ // Perform expensive psnr and ssim calculations while not holding lock.
+ double psnr = -1.0;
+ double ssim = -1.0;
+ if ((options_.compute_psnr || options_.compute_ssim) &&
+ comparison.captured.has_value() && comparison.rendered.has_value()) {
+ rtc::scoped_refptr<I420BufferInterface> reference_buffer =
+ comparison.captured->video_frame_buffer()->ToI420();
+ rtc::scoped_refptr<I420BufferInterface> test_buffer =
+ comparison.rendered->video_frame_buffer()->ToI420();
+ if (options_.adjust_cropping_before_comparing_frames) {
+ test_buffer = ScaleVideoFrameBuffer(
+ *test_buffer, reference_buffer->width(), reference_buffer->height());
+ reference_buffer = test::AdjustCropping(reference_buffer, test_buffer);
+ }
+ if (options_.compute_psnr) {
+ psnr = options_.use_weighted_psnr
+ ? I420WeightedPSNR(*reference_buffer, *test_buffer)
+ : I420PSNR(*reference_buffer, *test_buffer);
+ }
+ if (options_.compute_ssim) {
+ ssim = I420SSIM(*reference_buffer, *test_buffer);
+ }
+ }
+
+ const FrameStats& frame_stats = comparison.frame_stats;
+
+ MutexLock lock(&mutex_);
+ auto stats_it = stream_stats_.find(comparison.stats_key);
+ RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
+ StreamStats* stats = &stats_it->second;
+
+ frames_comparator_stats_.comparisons_done++;
+ if (comparison.overload_reason == OverloadReason::kCpu) {
+ frames_comparator_stats_.cpu_overloaded_comparisons_done++;
+ } else if (comparison.overload_reason == OverloadReason::kMemory) {
+ frames_comparator_stats_.memory_overloaded_comparisons_done++;
+ }
+
+ std::map<std::string, std::string> metadata;
+ metadata.emplace(SampleMetadataKey::kFrameIdMetadataKey,
+ std::to_string(frame_stats.frame_id));
+
+ if (psnr > 0) {
+ stats->psnr.AddSample(
+ StatsSample(psnr, frame_stats.rendered_time, metadata));
+ }
+ if (ssim > 0) {
+ stats->ssim.AddSample(
+ StatsSample(ssim, frame_stats.received_time, metadata));
+ }
+ stats->capture_frame_rate.AddEvent(frame_stats.captured_time);
+
+ // Compute dropped phase for dropped frame
+ if (comparison.type == FrameComparisonType::kDroppedFrame) {
+ FrameDropPhase dropped_phase;
+ if (frame_stats.decode_end_time.IsFinite()) {
+ dropped_phase = FrameDropPhase::kAfterDecoder;
+ } else if (frame_stats.decode_start_time.IsFinite()) {
+ dropped_phase = FrameDropPhase::kByDecoder;
+ } else if (frame_stats.encoded_time.IsFinite()) {
+ dropped_phase = FrameDropPhase::kTransport;
+ } else if (frame_stats.pre_encode_time.IsFinite()) {
+ dropped_phase = FrameDropPhase::kByEncoder;
+ } else {
+ dropped_phase = FrameDropPhase::kBeforeEncoder;
+ }
+ stats->dropped_by_phase[dropped_phase]++;
+ }
+
+ if (frame_stats.encoded_time.IsFinite()) {
+ stats->encode_time_ms.AddSample(
+ StatsSample(frame_stats.encoded_time - frame_stats.pre_encode_time,
+ frame_stats.encoded_time, metadata));
+ stats->encode_frame_rate.AddEvent(frame_stats.encoded_time);
+ stats->total_encoded_images_payload +=
+ frame_stats.encoded_image_size.bytes();
+ stats->target_encode_bitrate.AddSample(StatsSample(
+ frame_stats.target_encode_bitrate, frame_stats.encoded_time, metadata));
+ for (const auto& [spatial_layer, qp_values] :
+ frame_stats.spatial_layers_qp) {
+ for (SamplesStatsCounter::StatsSample qp : qp_values.GetTimedSamples()) {
+ qp.metadata = metadata;
+ stats->spatial_layers_qp[spatial_layer].AddSample(std::move(qp));
+ }
+ }
+
+ // Stats sliced on encoded frame type.
+ if (frame_stats.encoded_frame_type == VideoFrameType::kVideoFrameKey) {
+ ++stats->num_send_key_frames;
+ }
+ }
+ // Next stats can be calculated only if frame was received on remote side.
+ if (comparison.type != FrameComparisonType::kDroppedFrame ||
+ comparison.frame_stats.decoder_failed) {
+ if (frame_stats.rendered_time.IsFinite()) {
+ stats->total_delay_incl_transport_ms.AddSample(
+ StatsSample(frame_stats.rendered_time - frame_stats.captured_time,
+ frame_stats.received_time, metadata));
+ stats->receive_to_render_time_ms.AddSample(
+ StatsSample(frame_stats.rendered_time - frame_stats.received_time,
+ frame_stats.rendered_time, metadata));
+ }
+ if (frame_stats.decode_start_time.IsFinite()) {
+ stats->transport_time_ms.AddSample(
+ StatsSample(frame_stats.decode_start_time - frame_stats.encoded_time,
+ frame_stats.decode_start_time, metadata));
+
+ // Stats sliced on decoded frame type.
+ if (frame_stats.pre_decoded_frame_type ==
+ VideoFrameType::kVideoFrameKey) {
+ ++stats->num_recv_key_frames;
+ stats->recv_key_frame_size_bytes.AddSample(
+ StatsSample(frame_stats.pre_decoded_image_size.bytes(),
+ frame_stats.decode_start_time, metadata));
+ } else if (frame_stats.pre_decoded_frame_type ==
+ VideoFrameType::kVideoFrameDelta) {
+ stats->recv_delta_frame_size_bytes.AddSample(
+ StatsSample(frame_stats.pre_decoded_image_size.bytes(),
+ frame_stats.decode_start_time, metadata));
+ }
+ }
+ if (frame_stats.decode_end_time.IsFinite()) {
+ stats->decode_time_ms.AddSample(StatsSample(
+ frame_stats.decode_end_time - frame_stats.decode_start_time,
+ frame_stats.decode_end_time, metadata));
+ stats->resolution_of_decoded_frame.AddSample(
+ StatsSample(*comparison.frame_stats.decoded_frame_width *
+ *comparison.frame_stats.decoded_frame_height,
+ frame_stats.decode_end_time, metadata));
+ }
+
+ if (frame_stats.prev_frame_rendered_time.IsFinite() &&
+ frame_stats.rendered_time.IsFinite()) {
+ TimeDelta time_between_rendered_frames =
+ frame_stats.rendered_time - frame_stats.prev_frame_rendered_time;
+ stats->time_between_rendered_frames_ms.AddSample(StatsSample(
+ time_between_rendered_frames, frame_stats.rendered_time, metadata));
+ TimeDelta average_time_between_rendered_frames = TimeDelta::Millis(
+ stats->time_between_rendered_frames_ms.GetAverage());
+ if (time_between_rendered_frames >
+ std::max(kFreezeThreshold + average_time_between_rendered_frames,
+ 3 * average_time_between_rendered_frames)) {
+ stats->freeze_time_ms.AddSample(StatsSample(
+ time_between_rendered_frames, frame_stats.rendered_time, metadata));
+ auto freeze_end_it =
+ stream_last_freeze_end_time_.find(comparison.stats_key);
+ RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
+ stats->time_between_freezes_ms.AddSample(StatsSample(
+ frame_stats.prev_frame_rendered_time - freeze_end_it->second,
+ frame_stats.rendered_time, metadata));
+ freeze_end_it->second = frame_stats.rendered_time;
+ }
+ }
+ }
+ // Compute stream codec info.
+ if (frame_stats.used_encoder.has_value()) {
+ if (stats->encoders.empty() || stats->encoders.back().codec_name !=
+ frame_stats.used_encoder->codec_name) {
+ stats->encoders.push_back(*frame_stats.used_encoder);
+ }
+ stats->encoders.back().last_frame_id =
+ frame_stats.used_encoder->last_frame_id;
+ stats->encoders.back().switched_from_at =
+ frame_stats.used_encoder->switched_from_at;
+ }
+
+ if (frame_stats.used_decoder.has_value()) {
+ if (stats->decoders.empty() || stats->decoders.back().codec_name !=
+ frame_stats.used_decoder->codec_name) {
+ stats->decoders.push_back(*frame_stats.used_decoder);
+ }
+ stats->decoders.back().last_frame_id =
+ frame_stats.used_decoder->last_frame_id;
+ stats->decoders.back().switched_from_at =
+ frame_stats.used_decoder->switched_from_at;
+ }
+}
+
+Timestamp DefaultVideoQualityAnalyzerFramesComparator::Now() {
+ return clock_->CurrentTime();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h
new file mode 100644
index 0000000000..006c3eb9bf
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAMES_COMPARATOR_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAMES_COMPARATOR_H_
+
+#include <deque>
+#include <map>
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "rtc_base/event.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+
+namespace webrtc {
+
+struct FramesComparatorStats {
+ // Size of analyzer internal comparisons queue, measured when a new
+ // element is added to the queue.
+ SamplesStatsCounter comparisons_queue_size;
+ // Number of performed comparisons of 2 video frames from captured and
+ // rendered streams.
+ int64_t comparisons_done = 0;
+ // Number of cpu overloaded comparisons. Comparison is cpu overloaded if it is
+ // queued when there are too many not processed comparisons in the queue.
+ // Overloaded comparison doesn't include metrics like SSIM and PSNR that
+ // require heavy computations.
+ int64_t cpu_overloaded_comparisons_done = 0;
+ // Number of memory overloaded comparisons. Comparison is memory overloaded if
+ // it is queued when its captured frame was already removed due to high memory
+ // usage for that video stream.
+ int64_t memory_overloaded_comparisons_done = 0;
+};
+
+// Performs comparisons of added frames and tracks frames related statistics.
+// This class is thread safe.
+class DefaultVideoQualityAnalyzerFramesComparator {
+ public:
+ // Creates frames comparator.
+ // Frames comparator doesn't use `options.enable_receive_own_stream` for any
+ // purposes, because it's unrelated to its functionality.
+ DefaultVideoQualityAnalyzerFramesComparator(
+ webrtc::Clock* clock,
+ DefaultVideoQualityAnalyzerCpuMeasurer& cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions options = {})
+ : options_(options), clock_(clock), cpu_measurer_(cpu_measurer) {}
+ ~DefaultVideoQualityAnalyzerFramesComparator() { Stop({}); }
+
+ // Starts frames comparator. This method must be invoked before calling
+ // any other method on this object.
+ void Start(int max_threads_count);
+ // Stops frames comparator. This method will block until all added frame
+ // comparisons will be processed. After `Stop()` is invoked no more new
+ // comparisons can be added to this frames comparator.
+ //
+ // `last_rendered_frame_time` contains timestamps of the last rendered frame
+ // for each (stream, sender, receiver) tuple to properly update time between
+ // freezes: it has to include the time from the last freeze until the end of the call.
+ void Stop(
+ const std::map<InternalStatsKey, Timestamp>& last_rendered_frame_times);
+
+ // Ensures that stream `stream_index` has stats objects created for all
+ // potential receivers. This method must be called before adding any
+ // frames comparison for that stream.
+ void EnsureStatsForStream(size_t stream_index,
+ size_t sender_peer_index,
+ size_t peers_count,
+ Timestamp captured_time,
+ Timestamp start_time);
+ // Ensures that newly added participant will have stream stats objects created
+ // for all streams which they can receive. This method must be called before
+ // any frames comparison will be added for the newly added participant.
+ //
+ // `stream_started_time` - start time of each stream for which stats object
+ // has to be created.
+ // `start_time` - call start time.
+ void RegisterParticipantInCall(
+ rtc::ArrayView<std::pair<InternalStatsKey, Timestamp>>
+ stream_started_time,
+ Timestamp start_time);
+
+ // `captured` - video frame captured by sender to use for PSNR/SSIM
+ // computation. If `type` is `FrameComparisonType::kRegular` and
+ // `captured` is `absl::nullopt` comparison is assumed to be overloaded
+ // due to memory constraints.
+ // `rendered` - video frame rendered by receiver to use for PSNR/SSIM
+ // computation. Required only if `type` is
+ // `FrameComparisonType::kRegular`, but can still be omitted if
+ // `captured` is `absl::nullopt`.
+ void AddComparison(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats);
+ // `skipped_between_rendered` - amount of frames dropped on this stream before
+ // last received frame and current frame.
+ void AddComparison(InternalStatsKey stats_key,
+ int skipped_between_rendered,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats);
+
+ std::map<InternalStatsKey, StreamStats> stream_stats() const {
+ MutexLock lock(&mutex_);
+ return stream_stats_;
+ }
+ FramesComparatorStats frames_comparator_stats() const {
+ MutexLock lock(&mutex_);
+ return frames_comparator_stats_;
+ }
+
+ private:
+ enum State { kNew, kActive, kStopped };
+
+ void AddComparisonInternal(InternalStatsKey stats_key,
+ absl::optional<VideoFrame> captured,
+ absl::optional<VideoFrame> rendered,
+ FrameComparisonType type,
+ FrameStats frame_stats)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ void ProcessComparisons();
+ void ProcessComparison(const FrameComparison& comparison);
+ Timestamp Now();
+
+ const DefaultVideoQualityAnalyzerOptions options_;
+ webrtc::Clock* const clock_;
+ DefaultVideoQualityAnalyzerCpuMeasurer& cpu_measurer_;
+
+ mutable Mutex mutex_;
+ State state_ RTC_GUARDED_BY(mutex_) = State::kNew;
+ std::map<InternalStatsKey, StreamStats> stream_stats_ RTC_GUARDED_BY(mutex_);
+ std::map<InternalStatsKey, Timestamp> stream_last_freeze_end_time_
+ RTC_GUARDED_BY(mutex_);
+ std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(mutex_);
+ FramesComparatorStats frames_comparator_stats_ RTC_GUARDED_BY(mutex_);
+
+ std::vector<rtc::PlatformThread> thread_pool_;
+ rtc::Event comparison_available_event_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAMES_COMPARATOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc
new file mode 100644
index 0000000000..8d3cd47ed6
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc
@@ -0,0 +1,1648 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "api/test/create_frame_generator.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/strings/string_builder.h"
+#include "system_wrappers/include/clock.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Contains;
+using ::testing::DoubleEq;
+using ::testing::Each;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::Pair;
+using ::testing::SizeIs;
+
+using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample;
+
+constexpr int kMaxFramesInFlightPerStream = 10;
+
+DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() {
+ DefaultVideoQualityAnalyzerOptions options;
+ options.compute_psnr = false;
+ options.compute_ssim = false;
+ options.adjust_cropping_before_comparing_frames = false;
+ options.max_frames_in_flight_per_stream_count = kMaxFramesInFlightPerStream;
+ return options;
+}
+
+VideoFrame CreateFrame(uint16_t frame_id,
+ int width,
+ int height,
+ Timestamp timestamp) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(width, height,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+ test::FrameGeneratorInterface::VideoFrameData frame_data =
+ frame_generator->NextFrame();
+ return VideoFrame::Builder()
+ .set_id(frame_id)
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .set_timestamp_us(timestamp.us())
+ .build();
+}
+
+StreamCodecInfo Vp8CodecForOneFrame(uint16_t frame_id, Timestamp time) {
+ StreamCodecInfo info;
+ info.codec_name = "VP8";
+ info.first_frame_id = frame_id;
+ info.last_frame_id = frame_id;
+ info.switched_on_at = time;
+ info.switched_from_at = time;
+ return info;
+}
+
+FrameStats FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+ uint16_t frame_id,
+ Timestamp captured_time) {
+ FrameStats frame_stats(frame_id, captured_time);
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+ frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+ frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+ frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+ // Decode time is in microseconds.
+ frame_stats.decode_end_time = captured_time + TimeDelta::Micros(40010);
+ frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
+ frame_stats.used_encoder = Vp8CodecForOneFrame(1, frame_stats.encoded_time);
+ frame_stats.used_decoder =
+ Vp8CodecForOneFrame(1, frame_stats.decode_end_time);
+ frame_stats.decoded_frame_width = 10;
+ frame_stats.decoded_frame_height = 10;
+ return frame_stats;
+}
+
+FrameStats ShiftStatsOn(const FrameStats& stats, TimeDelta delta) {
+ FrameStats frame_stats(stats.frame_id, stats.captured_time + delta);
+ frame_stats.pre_encode_time = stats.pre_encode_time + delta;
+ frame_stats.encoded_time = stats.encoded_time + delta;
+ frame_stats.received_time = stats.received_time + delta;
+ frame_stats.decode_start_time = stats.decode_start_time + delta;
+ frame_stats.decode_end_time = stats.decode_end_time + delta;
+ frame_stats.rendered_time = stats.rendered_time + delta;
+
+ frame_stats.used_encoder = stats.used_encoder;
+ frame_stats.used_decoder = stats.used_decoder;
+ frame_stats.decoded_frame_width = stats.decoded_frame_width;
+ frame_stats.decoded_frame_height = stats.decoded_frame_height;
+
+ return frame_stats;
+}
+
+SamplesStatsCounter StatsCounter(
+ const std::vector<std::pair<double, Timestamp>>& samples) {
+ SamplesStatsCounter counter;
+ for (const std::pair<double, Timestamp>& sample : samples) {
+ counter.AddSample(SamplesStatsCounter::StatsSample{.value = sample.first,
+ .time = sample.second});
+ }
+ return counter;
+}
+
+double GetFirstOrDie(const SamplesStatsCounter& counter) {
+ EXPECT_FALSE(counter.IsEmpty()) << "Counter has to be not empty";
+ return counter.GetSamples()[0];
+}
+
+void AssertFirstMetadataHasField(const SamplesStatsCounter& counter,
+ const std::string& field_name,
+ const std::string& field_value) {
+ EXPECT_FALSE(counter.IsEmpty()) << "Coutner has to be not empty";
+ EXPECT_THAT(counter.GetTimedSamples()[0].metadata,
+ Contains(Pair(field_name, field_value)));
+}
+
+std::string ToString(const SamplesStatsCounter& counter) {
+ rtc::StringBuilder out;
+ for (const StatsSample& s : counter.GetTimedSamples()) {
+ out << "{ time_ms=" << s.time.ms() << "; value=" << s.value << "}, ";
+ }
+ return out.str();
+}
+
+void ExpectEmpty(const SamplesStatsCounter& counter) {
+ EXPECT_TRUE(counter.IsEmpty())
+ << "Expected empty SamplesStatsCounter, but got " << ToString(counter);
+}
+
+void ExpectEmpty(const SamplesRateCounter& counter) {
+ EXPECT_TRUE(counter.IsEmpty())
+ << "Expected empty SamplesRateCounter, but got "
+ << counter.GetEventsPerSecond();
+}
+
+void ExpectSizeAndAllElementsAre(const SamplesStatsCounter& counter,
+ int size,
+ double value) {
+ EXPECT_EQ(counter.NumSamples(), size);
+ EXPECT_THAT(counter.GetSamples(), Each(DoubleEq(value)));
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ StatsPresentedAfterAddingOneComparison) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer, AnalyzerOptionsForTest());
+
+ Timestamp stream_start_time = Clock::GetRealTimeClock()->CurrentTime();
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ size_t peers_count = 2;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ FrameStats frame_stats = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+ /*frame_id=*/1, stream_start_time);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, peers_count,
+ stream_start_time, stream_start_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kRegular, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ std::map<InternalStatsKey, StreamStats> stats = comparator.stream_stats();
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).transport_time_ms, /*size=*/1,
+ /*value=*/20.0);
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).total_delay_incl_transport_ms,
+ /*size=*/1, /*value=*/60.0);
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).encode_time_ms, /*size=*/1,
+ /*value=*/10.0);
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).decode_time_ms, /*size=*/1,
+ /*value=*/0.01);
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).receive_to_render_time_ms,
+ /*size=*/1, /*value=*/30.0);
+ ExpectSizeAndAllElementsAre(stats.at(stats_key).resolution_of_decoded_frame,
+ /*size=*/1, /*value=*/100.0);
+}
+
+TEST(
+ DefaultVideoQualityAnalyzerFramesComparatorTest,
+ MultiFrameStatsPresentedWithMetadataAfterAddingTwoComparisonWith10msDelay) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer, AnalyzerOptionsForTest());
+
+ Timestamp stream_start_time = Clock::GetRealTimeClock()->CurrentTime();
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ size_t peers_count = 2;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ FrameStats frame_stats1 = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+ /*frame_id=*/1, stream_start_time);
+ FrameStats frame_stats2 = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+ /*frame_id=*/2, stream_start_time + TimeDelta::Millis(15));
+ frame_stats2.prev_frame_rendered_time = frame_stats1.rendered_time;
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, peers_count,
+ stream_start_time, stream_start_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kRegular, frame_stats1);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kRegular, frame_stats2);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ std::map<InternalStatsKey, StreamStats> stats = comparator.stream_stats();
+ ExpectSizeAndAllElementsAre(
+ stats.at(stats_key).time_between_rendered_frames_ms, /*size=*/1,
+ /*value=*/15.0);
+ AssertFirstMetadataHasField(
+ stats.at(stats_key).time_between_rendered_frames_ms, "frame_id", "2");
+ EXPECT_DOUBLE_EQ(stats.at(stats_key).encode_frame_rate.GetEventsPerSecond(),
+ 2.0 / 15 * 1000)
+ << "There should be 2 events with interval of 15 ms";
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ FrameInFlightStatsAreHandledCorrectly) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer, AnalyzerOptionsForTest());
+
+ Timestamp stream_start_time = Clock::GetRealTimeClock()->CurrentTime();
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ size_t peers_count = 2;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // There are 7 different timings inside frame stats: captured, pre_encode,
+ // encoded, received, decode_start, decode_end, rendered. captured is always
+ // set and received is set together with decode_start. So we create 6
+ // different frame stats with interval of 15 ms, where for each stat next
+ // timings will be set
+ // * 1st - captured
+ // * 2nd - captured, pre_encode
+ // * 3rd - captured, pre_encode, encoded
+ // * 4th - captured, pre_encode, encoded, received, decode_start
+ // * 5th - captured, pre_encode, encoded, received, decode_start, decode_end
+ // * 6th - all of them set
+ std::vector<FrameStats> stats;
+ // 1st stat
+ FrameStats frame_stats(/*frame_id=*/1, stream_start_time);
+ stats.push_back(frame_stats);
+ // 2nd stat
+ frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
+ frame_stats.frame_id = 2;
+ frame_stats.pre_encode_time =
+ frame_stats.captured_time + TimeDelta::Millis(10);
+ stats.push_back(frame_stats);
+ // 3rd stat
+ frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
+ frame_stats.frame_id = 3;
+ frame_stats.encoded_time = frame_stats.captured_time + TimeDelta::Millis(20);
+ frame_stats.used_encoder = Vp8CodecForOneFrame(1, frame_stats.encoded_time);
+ stats.push_back(frame_stats);
+ // 4th stat
+ frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
+ frame_stats.frame_id = 4;
+ frame_stats.received_time = frame_stats.captured_time + TimeDelta::Millis(30);
+ frame_stats.decode_start_time =
+ frame_stats.captured_time + TimeDelta::Millis(40);
+ stats.push_back(frame_stats);
+ // 5th stat
+ frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
+ frame_stats.frame_id = 5;
+ frame_stats.decode_end_time =
+ frame_stats.captured_time + TimeDelta::Millis(50);
+ frame_stats.used_decoder =
+ Vp8CodecForOneFrame(1, frame_stats.decode_end_time);
+ frame_stats.decoded_frame_width = 10;
+ frame_stats.decoded_frame_height = 10;
+ stats.push_back(frame_stats);
+ // 6th stat
+ frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
+ frame_stats.frame_id = 6;
+ frame_stats.rendered_time = frame_stats.captured_time + TimeDelta::Millis(60);
+ stats.push_back(frame_stats);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, peers_count,
+ stream_start_time, stream_start_time);
+ for (size_t i = 0; i < stats.size() - 1; ++i) {
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, stats[i]);
+ }
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kRegular,
+ stats[stats.size() - 1]);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats result_stats = comparator.stream_stats().at(stats_key);
+
+ EXPECT_DOUBLE_EQ(result_stats.transport_time_ms.GetAverage(), 20.0)
+ << ToString(result_stats.transport_time_ms);
+ EXPECT_EQ(result_stats.transport_time_ms.NumSamples(), 3);
+
+ EXPECT_DOUBLE_EQ(result_stats.total_delay_incl_transport_ms.GetAverage(),
+ 60.0)
+ << ToString(result_stats.total_delay_incl_transport_ms);
+ EXPECT_EQ(result_stats.total_delay_incl_transport_ms.NumSamples(), 1);
+
+ EXPECT_DOUBLE_EQ(result_stats.encode_time_ms.GetAverage(), 10)
+ << ToString(result_stats.encode_time_ms);
+ EXPECT_EQ(result_stats.encode_time_ms.NumSamples(), 4);
+
+ EXPECT_DOUBLE_EQ(result_stats.decode_time_ms.GetAverage(), 10)
+ << ToString(result_stats.decode_time_ms);
+ EXPECT_EQ(result_stats.decode_time_ms.NumSamples(), 2);
+
+ EXPECT_DOUBLE_EQ(result_stats.receive_to_render_time_ms.GetAverage(), 30)
+ << ToString(result_stats.receive_to_render_time_ms);
+ EXPECT_EQ(result_stats.receive_to_render_time_ms.NumSamples(), 1);
+
+ EXPECT_DOUBLE_EQ(result_stats.resolution_of_decoded_frame.GetAverage(), 100)
+ << ToString(result_stats.resolution_of_decoded_frame);
+ EXPECT_EQ(result_stats.resolution_of_decoded_frame.NumSamples(), 2);
+
+ EXPECT_DOUBLE_EQ(result_stats.encode_frame_rate.GetEventsPerSecond(),
+ 4.0 / 45 * 1000)
+ << "There should be 4 events with interval of 15 ms";
+}
+
+// Tests to validate that stats for each possible input frame are computed
+// correctly.
+// Frame in flight start
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ CapturedOnlyInFlightFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectEmpty(stats.transport_time_ms);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectEmpty(stats.encode_time_ms);
+ ExpectEmpty(stats.decode_time_ms);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ ExpectEmpty(stats.resolution_of_decoded_frame);
+ ExpectEmpty(stats.target_encode_bitrate);
+ EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
+ ExpectEmpty(stats.recv_key_frame_size_bytes);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 0);
+ EXPECT_EQ(stats.num_send_key_frames, 0);
+ EXPECT_EQ(stats.num_recv_key_frames, 0);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_THAT(stats.encoders, IsEmpty());
+ EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ PreEncodedInFlightFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+ // Frame pre encoded
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectEmpty(stats.transport_time_ms);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectEmpty(stats.encode_time_ms);
+ ExpectEmpty(stats.decode_time_ms);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ ExpectEmpty(stats.resolution_of_decoded_frame);
+ ExpectEmpty(stats.target_encode_bitrate);
+ EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
+ ExpectEmpty(stats.recv_key_frame_size_bytes);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 0);
+ EXPECT_EQ(stats.num_send_key_frames, 0);
+ EXPECT_EQ(stats.num_recv_key_frames, 0);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_THAT(stats.encoders, IsEmpty());
+ EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ EncodedInFlightKeyFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ uint16_t frame_id = 1;
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+ // Frame pre encoded
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+ // Frame encoded
+ frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+ frame_stats.used_encoder =
+ Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+ frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+ frame_stats.encoded_image_size = DataSize::Bytes(1000);
+ frame_stats.target_encode_bitrate = 2000;
+ frame_stats.spatial_layers_qp = {
+ {0, StatsCounter(
+ /*samples=*/{{5, Timestamp::Seconds(1)},
+ {5, Timestamp::Seconds(2)}})}};
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectEmpty(stats.transport_time_ms);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+ ExpectEmpty(stats.decode_time_ms);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ ExpectEmpty(stats.resolution_of_decoded_frame);
+ ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+ /*value=*/2000.0);
+ EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+ ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+ /*value=*/5.0);
+ ExpectEmpty(stats.recv_key_frame_size_bytes);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+ EXPECT_EQ(stats.num_send_key_frames, 1);
+ EXPECT_EQ(stats.num_recv_key_frames, 0);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_EQ(stats.encoders,
+ std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+ EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ EncodedInFlightDeltaFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ uint16_t frame_id = 1;
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+ // Frame pre encoded
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+ // Frame encoded
+ frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+ frame_stats.used_encoder =
+ Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+ frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta;
+ frame_stats.encoded_image_size = DataSize::Bytes(1000);
+ frame_stats.target_encode_bitrate = 2000;
+ frame_stats.spatial_layers_qp = {
+ {0, StatsCounter(
+ /*samples=*/{{5, Timestamp::Seconds(1)},
+ {5, Timestamp::Seconds(2)}})}};
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectEmpty(stats.transport_time_ms);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+ ExpectEmpty(stats.decode_time_ms);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ ExpectEmpty(stats.resolution_of_decoded_frame);
+ ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+ /*value=*/2000.0);
+ EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+ ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+ /*value=*/5.0);
+ ExpectEmpty(stats.recv_key_frame_size_bytes);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+ EXPECT_EQ(stats.num_send_key_frames, 0);
+ EXPECT_EQ(stats.num_recv_key_frames, 0);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_EQ(stats.encoders,
+ std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+ EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ PreDecodedInFlightKeyFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ uint16_t frame_id = 1;
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+ // Frame pre encoded
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+ // Frame encoded
+ frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+ frame_stats.used_encoder =
+ Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+ frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+ frame_stats.encoded_image_size = DataSize::Bytes(1000);
+ frame_stats.target_encode_bitrate = 2000;
+ frame_stats.spatial_layers_qp = {
+ {0, StatsCounter(
+ /*samples=*/{{5, Timestamp::Seconds(1)},
+ {5, Timestamp::Seconds(2)}})}};
+ // Frame pre decoded
+ frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+ frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+ frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+ frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectSizeAndAllElementsAre(stats.transport_time_ms, /*size=*/1,
+ /*value=*/20.0);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+ ExpectEmpty(stats.decode_time_ms);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ ExpectEmpty(stats.resolution_of_decoded_frame);
+ ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+ /*value=*/2000.0);
+ EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+ ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+ /*value=*/5.0);
+ ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
+ /*value=*/500.0);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+ EXPECT_EQ(stats.num_send_key_frames, 1);
+ EXPECT_EQ(stats.num_recv_key_frames, 1);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_EQ(stats.encoders,
+ std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+ EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+ DecodedInFlightKeyFrameAccountedInStats) {
+ DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+ DefaultVideoQualityAnalyzerFramesComparator comparator(
+ Clock::GetRealTimeClock(), cpu_measurer,
+ DefaultVideoQualityAnalyzerOptions());
+
+ Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+ uint16_t frame_id = 1;
+ size_t stream = 0;
+ size_t sender = 0;
+ size_t receiver = 1;
+ InternalStatsKey stats_key(stream, sender, receiver);
+
+ // Frame captured
+ FrameStats frame_stats(/*frame_id=*/1, captured_time);
+ // Frame pre encoded
+ frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+ // Frame encoded
+ frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+ frame_stats.used_encoder =
+ Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+ frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+ frame_stats.encoded_image_size = DataSize::Bytes(1000);
+ frame_stats.target_encode_bitrate = 2000;
+ frame_stats.spatial_layers_qp = {
+ {0, StatsCounter(
+ /*samples=*/{{5, Timestamp::Seconds(1)},
+ {5, Timestamp::Seconds(2)}})}};
+ // Frame pre decoded
+ frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+ frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+ frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+ frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+ // Frame decoded
+ frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
+ frame_stats.decoded_frame_width = 200;
+ frame_stats.decoded_frame_height = 100;
+
+ frame_stats.used_decoder =
+ Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+
+ comparator.Start(/*max_threads_count=*/1);
+ comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+ captured_time, captured_time);
+ comparator.AddComparison(stats_key,
+ /*captured=*/absl::nullopt,
+ /*rendered=*/absl::nullopt,
+ FrameComparisonType::kFrameInFlight, frame_stats);
+ comparator.Stop(/*last_rendered_frame_times=*/{});
+
+ EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+ StreamStats stats = comparator.stream_stats().at(stats_key);
+ EXPECT_EQ(stats.stream_started_time, captured_time);
+ ExpectEmpty(stats.psnr);
+ ExpectEmpty(stats.ssim);
+ ExpectSizeAndAllElementsAre(stats.transport_time_ms, /*size=*/1,
+ /*value=*/20.0);
+ ExpectEmpty(stats.total_delay_incl_transport_ms);
+ ExpectEmpty(stats.time_between_rendered_frames_ms);
+ ExpectEmpty(stats.encode_frame_rate);
+ ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+ ExpectSizeAndAllElementsAre(stats.decode_time_ms, /*size=*/1, /*value=*/10.0);
+ ExpectEmpty(stats.receive_to_render_time_ms);
+ ExpectEmpty(stats.skipped_between_rendered);
+ ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+ ExpectEmpty(stats.time_between_freezes_ms);
+ EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0);
+ ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+ /*value=*/2000.0);
+ EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+ ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+ /*value=*/5.0);
+ ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
+ /*value=*/500.0);
+ ExpectEmpty(stats.recv_delta_frame_size_bytes);
+ EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+ EXPECT_EQ(stats.num_send_key_frames, 1);
+ EXPECT_EQ(stats.num_recv_key_frames, 1);
+ EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+ {FrameDropPhase::kBeforeEncoder, 0},
+ {FrameDropPhase::kByEncoder, 0},
+ {FrameDropPhase::kTransport, 0},
+ {FrameDropPhase::kByDecoder, 0},
+ {FrameDropPhase::kAfterDecoder, 0}}));
+ EXPECT_EQ(stats.encoders,
+ std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+ EXPECT_EQ(stats.decoders,
+ std::vector<StreamCodecInfo>{*frame_stats.used_decoder});
+}
+
+// In-flight key frame whose decode failed: send-side and receive-side metrics
+// gathered before the failure (transport time, encode time, received key
+// frame size, QP) must still be accounted; decode/render metrics stay empty;
+// and the frame is not counted as dropped in any phase, because a frame that
+// is still in flight is not a drop.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     DecoderFailureOnInFlightKeyFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+  // Frame pre decoded
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+  // Frame decoded
+  frame_stats.decoder_failed = true;
+  // NOTE(review): decode_end_time is never assigned on this failure path, so
+  // the decoder codec info below is stamped with its default value — confirm
+  // this is intended.
+  frame_stats.used_decoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kFrameInFlight, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectSizeAndAllElementsAre(stats.transport_time_ms, /*size=*/1,
+                              /*value=*/20.0);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
+                              /*value=*/500.0);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 1);
+  // All frame in flight are not considered as dropped.
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_EQ(stats.decoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_decoder});
+}
+// Frame in flight end
+
+// Dropped frame start
+// A frame that was only captured and then dropped: every pipeline metric must
+// stay empty, and the drop must be attributed to the kBeforeEncoder phase.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     CapturedOnlyDroppedFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured; no further pipeline stages are filled in, which marks the
+  // frame as dropped before it reached the encoder.
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectEmpty(stats.encode_time_ms);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectEmpty(stats.target_encode_bitrate);
+  EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 0);
+  EXPECT_EQ(stats.num_send_key_frames, 0);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 1},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_THAT(stats.encoders, IsEmpty());
+  EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+// A frame that reached pre-encode but was never encoded: all metrics stay
+// empty (pre_encode_time alone feeds no counter here), and the drop is
+// attributed to the kByEncoder phase.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     PreEncodedDroppedFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded; encoded_time is left unset, which marks the frame as
+  // dropped by the encoder.
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectEmpty(stats.encode_time_ms);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectEmpty(stats.target_encode_bitrate);
+  EXPECT_THAT(stats.spatial_layers_qp, IsEmpty());
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 0);
+  EXPECT_EQ(stats.num_send_key_frames, 0);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 1},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_THAT(stats.encoders, IsEmpty());
+  EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+// A key frame that was encoded but never received: send-side metrics (encode
+// time, target bitrate, QP, payload size, sent key frame count) are recorded,
+// receive-side metrics stay empty, and the drop is attributed to kTransport.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     EncodedDroppedKeyFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded; received_time is left unset, which marks the frame as
+  // dropped in transport.
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 1},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+// Same scenario as EncodedDroppedKeyFrameAccountedInStats but for a delta
+// frame: the drop is still attributed to kTransport, while both
+// num_send_key_frames and num_recv_key_frames remain zero.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     EncodedDroppedDeltaFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded as a delta frame; received_time is left unset, which marks
+  // the frame as dropped in transport.
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameDelta;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 0);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 1},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+// A key frame that was received and handed to the decoder but produced no
+// decode result: send-side metrics are recorded, receive/decode metrics stay
+// empty (including transport_time_ms and recv_key_frame_size_bytes in this
+// variant), and the drop is attributed to kByDecoder.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     PreDecodedDroppedKeyFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  // Frame pre decoded; decode_end_time is left unset, which marks the frame
+  // as dropped by the decoder.
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 1},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_THAT(stats.decoders, IsEmpty());
+}
+
+// A fully decoded key frame reported as dropped: documents the current
+// attribution to kAfterDecoder. Note that receive-side counters
+// (transport_time_ms, decode_time_ms, recv_key_frame_size_bytes,
+// resolution_of_decoded_frame) are NOT populated on this path even though the
+// corresponding FrameStats fields are set.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     DecodedDroppedKeyFrameAccountedInStats) {
+  // We don't really drop frames after decoder, so it's a bit unclear what is
+  // correct way to account such frames in stats, so this test just fixes some
+  // current way.
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+  // Frame pre decoded
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+  // Frame decoded
+  frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
+  frame_stats.used_decoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+  frame_stats.decoded_frame_width = 200;
+  frame_stats.decoded_frame_height = 100;
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectEmpty(stats.transport_time_ms);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectEmpty(stats.recv_key_frame_size_bytes);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 0);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 1}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_EQ(stats.decoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_decoder});
+}
+
+// A dropped key frame whose decode explicitly failed (decoder_failed = true):
+// unlike the pre-decoded-only drop above, transport_time_ms and the received
+// key frame size/count ARE accounted here, and the drop is attributed to
+// kByDecoder.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     DecoderFailedDroppedKeyFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+  // Frame pre decoded
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+  // Frame decoded
+  frame_stats.decoder_failed = true;
+  // NOTE(review): decode_end_time is never assigned on this failure path, so
+  // the decoder codec info below is stamped with its default value — confirm
+  // this is intended.
+  frame_stats.used_decoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kDroppedFrame, frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  ExpectEmpty(stats.psnr);
+  ExpectEmpty(stats.ssim);
+  ExpectSizeAndAllElementsAre(stats.transport_time_ms, /*size=*/1,
+                              /*value=*/20.0);
+  ExpectEmpty(stats.total_delay_incl_transport_ms);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  ExpectEmpty(stats.decode_time_ms);
+  ExpectEmpty(stats.receive_to_render_time_ms);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  ExpectEmpty(stats.resolution_of_decoded_frame);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
+                              /*value=*/500.0);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 1);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 1},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_EQ(stats.decoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_decoder});
+}
+// Dropped frame end
+
+// Regular frame start
+// Happy path: a key frame that went through the whole pipeline
+// (capture -> encode -> transport -> decode -> render). A real VideoFrame is
+// compared against itself, so psnr/ssim are computed; every per-stage latency
+// counter gets a sample, and no phase records a drop.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     RenderedKeyFrameAccountedInStats) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Frame captured
+  VideoFrame frame =
+      CreateFrame(frame_id, /*width=*/320, /*height=*/180, captured_time);
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+  // Frame pre decoded
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+  // Frame decoded
+  frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
+  frame_stats.used_decoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+  frame_stats.decoded_frame_width = 200;
+  frame_stats.decoded_frame_height = 100;
+  // Frame rendered
+  frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  // The same frame is used as captured and rendered, so quality metrics
+  // compare the frame against itself.
+  comparator.AddComparison(stats_key,
+                           /*captured=*/frame,
+                           /*rendered=*/frame, FrameComparisonType::kRegular,
+                           frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  EXPECT_EQ(stats.stream_started_time, captured_time);
+  EXPECT_GE(GetFirstOrDie(stats.psnr), 20);
+  EXPECT_GE(GetFirstOrDie(stats.ssim), 0.5);
+  ExpectSizeAndAllElementsAre(stats.transport_time_ms, /*size=*/1,
+                              /*value=*/20.0);
+  EXPECT_GE(GetFirstOrDie(stats.total_delay_incl_transport_ms), 60.0);
+  ExpectEmpty(stats.time_between_rendered_frames_ms);
+  ExpectEmpty(stats.encode_frame_rate);
+  ExpectSizeAndAllElementsAre(stats.encode_time_ms, /*size=*/1, /*value=*/10.0);
+  EXPECT_GE(GetFirstOrDie(stats.decode_time_ms), 10.0);
+  EXPECT_GE(GetFirstOrDie(stats.receive_to_render_time_ms), 30.0);
+  ExpectEmpty(stats.skipped_between_rendered);
+  ExpectSizeAndAllElementsAre(stats.freeze_time_ms, /*size=*/1, /*value=*/0);
+  ExpectEmpty(stats.time_between_freezes_ms);
+  EXPECT_GE(GetFirstOrDie(stats.resolution_of_decoded_frame), 200 * 100.0);
+  ExpectSizeAndAllElementsAre(stats.target_encode_bitrate, /*size=*/1,
+                              /*value=*/2000.0);
+  EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1));
+  ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2,
+                              /*value=*/5.0);
+  ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1,
+                              /*value=*/500.0);
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+  EXPECT_EQ(stats.total_encoded_images_payload, 1000);
+  EXPECT_EQ(stats.num_send_key_frames, 1);
+  EXPECT_EQ(stats.num_recv_key_frames, 1);
+  EXPECT_THAT(stats.dropped_by_phase, Eq(std::map<FrameDropPhase, int64_t>{
+                                          {FrameDropPhase::kBeforeEncoder, 0},
+                                          {FrameDropPhase::kByEncoder, 0},
+                                          {FrameDropPhase::kTransport, 0},
+                                          {FrameDropPhase::kByDecoder, 0},
+                                          {FrameDropPhase::kAfterDecoder, 0}}));
+  EXPECT_EQ(stats.encoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_encoder});
+  EXPECT_EQ(stats.decoders,
+            std::vector<StreamCodecInfo>{*frame_stats.used_decoder});
+}
+
+// Verifies that every per-frame metric collected for a regular (rendered)
+// frame carries the originating frame id in its sample metadata.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer,
+      DefaultVideoQualityAnalyzerOptions());
+
+  Timestamp captured_time = Clock::GetRealTimeClock()->CurrentTime();
+  uint16_t frame_id = 1;
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  // Populate FrameStats phase by phase, 10ms apart, emulating a frame's full
+  // journey through the pipeline.
+  // Frame captured
+  VideoFrame frame =
+      CreateFrame(frame_id, /*width=*/320, /*height=*/180, captured_time);
+  FrameStats frame_stats(/*frame_id=*/1, captured_time);
+  // Frame pre encoded
+  frame_stats.pre_encode_time = captured_time + TimeDelta::Millis(10);
+  // Frame encoded
+  frame_stats.encoded_time = captured_time + TimeDelta::Millis(20);
+  frame_stats.used_encoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.encoded_time);
+  frame_stats.encoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.encoded_image_size = DataSize::Bytes(1000);
+  frame_stats.target_encode_bitrate = 2000;
+  frame_stats.spatial_layers_qp = {
+      {0, StatsCounter(
+              /*samples=*/{{5, Timestamp::Seconds(1)},
+                           {5, Timestamp::Seconds(2)}})}};
+  // Frame pre decoded
+  frame_stats.pre_decoded_frame_type = VideoFrameType::kVideoFrameKey;
+  frame_stats.pre_decoded_image_size = DataSize::Bytes(500);
+  frame_stats.received_time = captured_time + TimeDelta::Millis(30);
+  frame_stats.decode_start_time = captured_time + TimeDelta::Millis(40);
+  // Frame decoded
+  frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
+  frame_stats.used_decoder =
+      Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time);
+  // Frame rendered
+  frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
+  frame_stats.decoded_frame_width = 200;
+  frame_stats.decoded_frame_height = 100;
+
+  // Run a single regular comparison through the comparator and stop it so
+  // that the aggregated per-stream stats become available.
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2,
+                                  captured_time, captured_time);
+  comparator.AddComparison(stats_key,
+                           /*captured=*/frame,
+                           /*rendered=*/frame, FrameComparisonType::kRegular,
+                           frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  // Every collected metric's first sample must carry the frame id.
+  AssertFirstMetadataHasField(stats.psnr, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.ssim, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.transport_time_ms, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.total_delay_incl_transport_ms, "frame_id",
+                              "1");
+  AssertFirstMetadataHasField(stats.encode_time_ms, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.decode_time_ms, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.receive_to_render_time_ms, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.resolution_of_decoded_frame, "frame_id",
+                              "1");
+  AssertFirstMetadataHasField(stats.target_encode_bitrate, "frame_id", "1");
+  AssertFirstMetadataHasField(stats.spatial_layers_qp[0], "frame_id", "1");
+  AssertFirstMetadataHasField(stats.recv_key_frame_size_bytes, "frame_id", "1");
+
+  // The only frame was received as a key frame, so no delta frame sizes were
+  // recorded.
+  ExpectEmpty(stats.recv_delta_frame_size_bytes);
+}
+// Regular frame end
+
+// Verifies that freeze-related metrics (skipped frames, freeze duration and
+// time between freezes) carry the id of the frame that ended the freeze.
+TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
+     FreezeStatsPresentedWithMetadataAfterAddFrameWithSkippedAndDelay) {
+  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
+  DefaultVideoQualityAnalyzerFramesComparator comparator(
+      Clock::GetRealTimeClock(), cpu_measurer, AnalyzerOptionsForTest());
+
+  Timestamp stream_start_time = Clock::GetRealTimeClock()->CurrentTime();
+  size_t stream = 0;
+  size_t sender = 0;
+  size_t receiver = 1;
+  size_t peers_count = 2;
+  InternalStatsKey stats_key(stream, sender, receiver);
+
+  comparator.Start(/*max_threads_count=*/1);
+  comparator.EnsureStatsForStream(stream, sender, peers_count,
+                                  stream_start_time, stream_start_time);
+
+  // Add 5 frames which were rendered with 30 fps (~30ms between frames)
+  // Frame ids are in [1..5] and last frame is with 120ms offset from first.
+  Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+  for (int i = 0; i < 5; ++i) {
+    FrameStats frame_stats = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+        /*frame_id=*/i + 1, stream_start_time + TimeDelta::Millis(30 * i));
+    frame_stats.prev_frame_rendered_time = prev_frame_rendered_time;
+    prev_frame_rendered_time = frame_stats.rendered_time;
+
+    comparator.AddComparison(stats_key,
+                             /*captured=*/absl::nullopt,
+                             /*rendered=*/absl::nullopt,
+                             FrameComparisonType::kRegular, frame_stats);
+  }
+
+  // Next frame was rendered with 4 frames skipped and delay 300ms after last
+  // frame. This gap is long enough to be counted as a freeze.
+  FrameStats freeze_frame_stats =
+      FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
+          /*frame_id=*/10, stream_start_time + TimeDelta::Millis(120 + 300));
+  freeze_frame_stats.prev_frame_rendered_time = prev_frame_rendered_time;
+
+  comparator.AddComparison(stats_key,
+                           /*skipped_between_rendered=*/4,
+                           /*captured=*/absl::nullopt,
+                           /*rendered=*/absl::nullopt,
+                           FrameComparisonType::kRegular, freeze_frame_stats);
+  comparator.Stop(/*last_rendered_frame_times=*/{});
+
+  // All freeze metrics must be attributed to frame 10, which ended the freeze.
+  StreamStats stats = comparator.stream_stats().at(stats_key);
+  ASSERT_THAT(GetFirstOrDie(stats.skipped_between_rendered), Eq(4));
+  AssertFirstMetadataHasField(stats.skipped_between_rendered, "frame_id", "10");
+  ASSERT_THAT(GetFirstOrDie(stats.freeze_time_ms), Eq(300));
+  AssertFirstMetadataHasField(stats.freeze_time_ms, "frame_id", "10");
+  // 180ms is time from the stream start to the rendered time of the last frame
+  // among first 5 frames which were received before freeze.
+  ASSERT_THAT(GetFirstOrDie(stats.time_between_freezes_ms), Eq(180));
+  AssertFirstMetadataHasField(stats.time_between_freezes_ms, "frame_id", "10");
+}
+// Stats validation tests end.
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc
new file mode 100644
index 0000000000..16f49ef154
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
+
+#include "api/video/video_frame.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+// Renders the key as "stream=<stream>_sender=<sender>_receiver=<receiver>".
+std::string InternalStatsKey::ToString() const {
+  rtc::StringBuilder builder;
+  builder << "stream=" << stream;
+  builder << "_sender=" << sender;
+  builder << "_receiver=" << receiver;
+  return builder.str();
+}
+
+// Lexicographic ordering over (stream, sender, receiver); required so that
+// InternalStatsKey can be used as a std::map key.
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
+  return a.stream != b.stream   ? a.stream < b.stream
+         : a.sender != b.sender ? a.sender < b.sender
+                                : a.receiver < b.receiver;
+}
+
+// Keys are equal only when all three components match.
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
+  if (a.stream != b.stream) {
+    return false;
+  }
+  if (a.sender != b.sender) {
+    return false;
+  }
+  return a.receiver == b.receiver;
+}
+
+// Plain data carrier: moves the potentially heavyweight members (key, frames
+// and per-frame stats) into place; the small enums are copied.
+FrameComparison::FrameComparison(InternalStatsKey stats_key,
+                                 absl::optional<VideoFrame> captured,
+                                 absl::optional<VideoFrame> rendered,
+                                 FrameComparisonType type,
+                                 FrameStats frame_stats,
+                                 OverloadReason overload_reason)
+    : stats_key(std::move(stats_key)),
+      captured(std::move(captured)),
+      rendered(std::move(rendered)),
+      type(type),
+      frame_stats(std::move(frame_stats)),
+      overload_reason(overload_reason) {}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h
new file mode 100644
index 0000000000..10f1314f46
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_INTERNAL_SHARED_OBJECTS_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_INTERNAL_SHARED_OBJECTS_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_type.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+
+namespace webrtc {
+
+// Identifies a (stream, sender, receiver) triple for which stats are
+// accumulated.
+struct InternalStatsKey {
+  InternalStatsKey(size_t stream, size_t sender, size_t receiver)
+      : stream(stream), sender(sender), receiver(receiver) {}
+
+  // Returns "stream=<stream>_sender=<sender>_receiver=<receiver>".
+  std::string ToString() const;
+
+  size_t stream;
+  size_t sender;
+  size_t receiver;
+};
+
+// Required to use InternalStatsKey as std::map key.
+bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
+bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
+
+// Final stats computed for a frame after it went through the whole video
+// pipeline from capturing to rendering or dropping.
+struct FrameStats {
+  FrameStats(uint16_t frame_id, Timestamp captured_time)
+      : frame_id(frame_id), captured_time(captured_time) {}
+
+  // Id of the frame these stats belong to.
+  uint16_t frame_id;
+  // Frame events timestamp. Each timestamp defaults to MinusInfinity until
+  // the corresponding pipeline event is recorded.
+  Timestamp captured_time;
+  Timestamp pre_encode_time = Timestamp::MinusInfinity();
+  Timestamp encoded_time = Timestamp::MinusInfinity();
+  // Time when last packet of a frame was received.
+  Timestamp received_time = Timestamp::MinusInfinity();
+  Timestamp decode_start_time = Timestamp::MinusInfinity();
+  Timestamp decode_end_time = Timestamp::MinusInfinity();
+  Timestamp rendered_time = Timestamp::MinusInfinity();
+  Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
+
+  // Sender-side frame type and size as produced by the encoder.
+  VideoFrameType encoded_frame_type = VideoFrameType::kEmptyFrame;
+  DataSize encoded_image_size = DataSize::Bytes(0);
+  // Receiver-side frame type and size as observed before decoding.
+  VideoFrameType pre_decoded_frame_type = VideoFrameType::kEmptyFrame;
+  DataSize pre_decoded_image_size = DataSize::Bytes(0);
+  uint32_t target_encode_bitrate = 0;
+  // Sender side qp values per spatial layer. When the spatial layer is not
+  // set on `webrtc::EncodedImage`, 0 is used as the default layer key.
+  std::map<int, SamplesStatsCounter> spatial_layers_qp;
+
+  // Resolution of the decoded frame, when known.
+  absl::optional<int> decoded_frame_width = absl::nullopt;
+  absl::optional<int> decoded_frame_height = absl::nullopt;
+
+  // Can be not set if frame was dropped by encoder.
+  absl::optional<StreamCodecInfo> used_encoder = absl::nullopt;
+  // Can be not set if frame was dropped in the network.
+  absl::optional<StreamCodecInfo> used_decoder = absl::nullopt;
+
+  bool decoder_failed = false;
+};
+
+// Describes why comparison was done in overloaded mode (without calculating
+// PSNR and SSIM).
+enum class OverloadReason {
+  // Comparison was processed normally.
+  kNone,
+  // Not enough CPU to process all incoming comparisons.
+  kCpu,
+  // Not enough memory to store captured frames for all comparisons.
+  kMemory
+};
+
+enum class FrameComparisonType {
+  // Comparison for captured and rendered frame.
+  kRegular,
+  // Comparison for captured frame that is known to be dropped somewhere in
+  // video pipeline.
+  kDroppedFrame,
+  // Comparison for captured frame that was still in the video pipeline when
+  // the test was stopped. It's unknown whether this frame was dropped or
+  // whether it would have been delivered if the test had continued.
+  kFrameInFlight
+};
+
+// Represents a comparison between two VideoFrames. Contains the video frames
+// themselves and their stats. Can be one of two kinds:
+// 1. Normal - `captured` is present and either `rendered` is present as well
+//    (the frame was delivered), or `rendered` is omitted (the frame was
+//    dropped; see `type`).
+// 2. Overloaded - both `captured` and `rendered` are omitted because there
+//    were too many comparisons in the queue. `type` still records whether
+//    the frame was dropped or not.
+struct FrameComparison {
+  FrameComparison(InternalStatsKey stats_key,
+                  absl::optional<VideoFrame> captured,
+                  absl::optional<VideoFrame> rendered,
+                  FrameComparisonType type,
+                  FrameStats frame_stats,
+                  OverloadReason overload_reason);
+
+  // Key of the stream to which this comparison's stats belong.
+  InternalStatsKey stats_key;
+  // Frames can be omitted if there are too many computations waiting in the
+  // queue.
+  absl::optional<VideoFrame> captured;
+  absl::optional<VideoFrame> rendered;
+  // How the frame went through the pipeline: regular, dropped or in flight.
+  FrameComparisonType type;
+  FrameStats frame_stats;
+  // Why the frames were omitted, if they were (kNone otherwise).
+  OverloadReason overload_reason;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_INTERNAL_SHARED_OBJECTS_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc
new file mode 100644
index 0000000000..f5029ac956
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc
@@ -0,0 +1,682 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/rtp_packet_info.h"
+#include "api/rtp_packet_infos.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/metrics/metric.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/metrics/stdout_metrics_exporter.h"
+#include "api/video/encoded_image.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Contains;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
+
+using ::webrtc::test::DefaultMetricsLogger;
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Metric;
+using ::webrtc::test::MetricsExporter;
+using ::webrtc::test::StdoutMetricsExporter;
+using ::webrtc::test::Unit;
+
+// Test constants: the analyzer runs with a single comparison thread, keeps at
+// most 10 frames in flight per stream, and uses 320x240 generated frames.
+constexpr int kAnalyzerMaxThreadsCount = 1;
+constexpr int kMaxFramesInFlightPerStream = 10;
+constexpr int kFrameWidth = 320;
+constexpr int kFrameHeight = 240;
+
+// Builds analyzer options with full quality metrics (PSNR/SSIM) and detailed
+// per-frame stats enabled, and cropping adjustment disabled.
+DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() {
+  DefaultVideoQualityAnalyzerOptions opts;
+  opts.compute_psnr = true;
+  opts.compute_ssim = true;
+  opts.adjust_cropping_before_comparing_frames = false;
+  opts.max_frames_in_flight_per_stream_count = kMaxFramesInFlightPerStream;
+  opts.report_detailed_frame_stats = true;
+  return opts;
+}
+
+// Pulls the next frame from `frame_generator` and stamps it with
+// `timestamp_us` as its timestamp.
+VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
+                     int64_t timestamp_us) {
+  auto frame_data = frame_generator->NextFrame();
+  VideoFrame::Builder builder;
+  builder.set_video_frame_buffer(frame_data.buffer)
+      .set_update_rect(frame_data.update_rect)
+      .set_timestamp_us(timestamp_us);
+  return builder.build();
+}
+
+// Produces a minimal EncodedImage for `frame`: a single fake packet with
+// ssrc 1 whose receive time is 10 ms after the frame's capture time.
+EncodedImage FakeEncode(const VideoFrame& frame) {
+  RtpPacketInfo packet_info(
+      /*ssrc=*/1,
+      /*csrcs=*/{},
+      /*rtp_timestamp=*/frame.timestamp(),
+      /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000));
+  EncodedImage image;
+  image.SetPacketInfos(RtpPacketInfos({packet_info}));
+  return image;
+}
+
+// Returns a copy of `frame` whose pixel buffer is an independent I420 copy,
+// so that the copy does not share the buffer with the original.
+VideoFrame DeepCopy(const VideoFrame& frame) {
+  VideoFrame result = frame;
+  result.set_video_frame_buffer(
+      I420Buffer::Copy(*frame.video_frame_buffer()->ToI420()));
+  return result;
+}
+
+// Drives `frames_count` frames through `analyzer` end to end: each frame is
+// captured, pre-encoded and encoded by `sender` on `stream_label`, then
+// pre-decoded, decoded and rendered by every peer in `receivers`. An optional
+// `interframe_delay_ms` real-time sleep between frames emulates a capture
+// frame rate.
+void PassFramesThroughAnalyzer(DefaultVideoQualityAnalyzer& analyzer,
+                               absl::string_view sender,
+                               absl::string_view stream_label,
+                               std::vector<absl::string_view> receivers,
+                               int frames_count,
+                               test::FrameGeneratorInterface& frame_generator,
+                               int interframe_delay_ms = 0) {
+  for (int i = 0; i < frames_count; ++i) {
+    VideoFrame frame = NextFrame(&frame_generator, /*timestamp_us=*/1);
+    // The analyzer assigns the frame id at capture time; it must be set on
+    // the frame before the following events are reported.
+    uint16_t frame_id =
+        analyzer.OnFrameCaptured(sender, std::string(stream_label), frame);
+    frame.set_id(frame_id);
+    analyzer.OnFramePreEncode(sender, frame);
+    analyzer.OnFrameEncoded(sender, frame.id(), FakeEncode(frame),
+                            VideoQualityAnalyzerInterface::EncoderStats(),
+                            false);
+    // Deliver the frame to every receiver independently.
+    for (absl::string_view receiver : receivers) {
+      VideoFrame received_frame = DeepCopy(frame);
+      analyzer.OnFramePreDecode(receiver, received_frame.id(),
+                                FakeEncode(received_frame));
+      analyzer.OnFrameDecoded(receiver, received_frame,
+                              VideoQualityAnalyzerInterface::DecoderStats());
+      analyzer.OnFrameRendered(receiver, received_frame);
+    }
+    // No sleep is needed after the last frame.
+    if (i < frames_count - 1 && interframe_delay_ms > 0) {
+      SleepMs(interframe_delay_ms);
+    }
+  }
+}
+
+// Metric fields to assert on
+struct MetricValidationInfo {
+  // Full metric test case, e.g. "test_case/alice_video".
+  std::string test_case;
+  // Metric name, e.g. "psnr_dB".
+  std::string name;
+  Unit unit;
+  ImprovementDirection improvement_direction;
+};
+
+// Field-wise equality over all four validated fields.
+bool operator==(const MetricValidationInfo& a, const MetricValidationInfo& b) {
+  if (a.test_case != b.test_case) {
+    return false;
+  }
+  if (a.name != b.name) {
+    return false;
+  }
+  if (a.unit != b.unit) {
+    return false;
+  }
+  return a.improvement_direction == b.improvement_direction;
+}
+
+// Pretty-prints a MetricValidationInfo for readable gtest failure messages.
+std::ostream& operator<<(std::ostream& os, const MetricValidationInfo& m) {
+  os << "{ test_case=" << m.test_case;
+  os << "; name=" << m.name;
+  os << "; unit=" << test::ToString(m.unit);
+  os << "; improvement_direction=" << test::ToString(m.improvement_direction);
+  os << " }";
+  return os;
+}
+
+// Projects each Metric onto the subset of fields the tests assert on.
+std::vector<MetricValidationInfo> ToValidationInfo(
+    const std::vector<Metric>& metrics) {
+  std::vector<MetricValidationInfo> result;
+  result.reserve(metrics.size());
+  for (const Metric& metric : metrics) {
+    MetricValidationInfo info;
+    info.test_case = metric.test_case;
+    info.name = metric.name;
+    info.unit = metric.unit;
+    info.improvement_direction = metric.improvement_direction;
+    result.push_back(std::move(info));
+  }
+  return result;
+}
+
+// Extracts only the `test_case` field from each collected metric.
+std::vector<std::string> ToTestCases(const std::vector<Metric>& metrics) {
+  std::vector<std::string> out;
+  // The output size is known up front; reserve to avoid reallocations.
+  out.reserve(metrics.size());
+  for (const Metric& m : metrics) {
+    out.push_back(m.test_case);
+  }
+  return out;
+}
+
+// Passes 5 frames between a single sender ("alice") and a single receiver
+// ("bob") and checks the exact set of exported metrics: for the 2-peer case
+// the test case name is "<test_case>/<stream>" without peer suffixes.
+TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock());
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       &metrics_logger, options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/5, *frame_generator,
+                            /*interframe_delay_ms=*/50);
+  analyzer.Stop();
+
+  // Every metric must be reported exactly once with the expected name, unit
+  // and improvement direction.
+  std::vector<MetricValidationInfo> metrics =
+      ToValidationInfo(metrics_logger.GetCollectedMetrics());
+  EXPECT_THAT(
+      metrics,
+      UnorderedElementsAre(
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "psnr_dB",
+              .unit = Unit::kUnitless,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "ssim",
+              .unit = Unit::kUnitless,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "transport_time",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "total_delay_incl_transport",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "time_between_rendered_frames",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "harmonic_framerate",
+              .unit = Unit::kHertz,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "encode_frame_rate",
+              .unit = Unit::kHertz,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "encode_time",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "time_between_freezes",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "freeze_time_ms",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "pixels_per_frame",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "min_psnr_dB",
+              .unit = Unit::kUnitless,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "decode_time",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "receive_to_render_time",
+              .unit = Unit::kMilliseconds,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "dropped_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "frames_in_flight",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "rendered_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "max_skipped",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "target_encode_bitrate",
+              .unit = Unit::kKilobitsPerSecond,
+              .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "qp_sl0",
+              .unit = Unit::kUnitless,
+              .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "actual_encode_bitrate",
+              .unit = Unit::kKilobitsPerSecond,
+              .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "capture_frame_rate",
+              .unit = Unit::kHertz,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "num_encoded_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "num_decoded_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "num_send_key_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "num_recv_key_frames",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "recv_key_frame_size_bytes",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          MetricValidationInfo{
+              .test_case = "test_case/alice_video",
+              .name = "recv_delta_frame_size_bytes",
+              .unit = Unit::kCount,
+              .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+          // CPU usage is global, so it is reported on the bare test case.
+          MetricValidationInfo{.test_case = "test_case",
+                               .name = "cpu_usage_%",
+                               .unit = Unit::kUnitless,
+                               .improvement_direction =
+                                   ImprovementDirection::kSmallerIsBetter}));
+}
+
+TEST(DefaultVideoQualityAnalyzerMetricNamesTest,
+ MetricNamesFor3PeersAreCorrect) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock());
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ &metrics_logger, options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{"alice", "bob", "charlie"},
+ kAnalyzerMaxThreadsCount);
+
+ PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+ {"bob", "charlie"},
+ /*frames_count=*/5, *frame_generator,
+ /*interframe_delay_ms=*/50);
+ analyzer.Stop();
+
+ std::vector<MetricValidationInfo> metrics =
+ ToValidationInfo(metrics_logger.GetCollectedMetrics());
+ EXPECT_THAT(
+ metrics,
+ UnorderedElementsAre(
+ // Bob
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "ssim",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "transport_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "total_delay_incl_transport",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "time_between_rendered_frames",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "harmonic_framerate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "encode_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "encode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "time_between_freezes",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "freeze_time_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "pixels_per_frame",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "min_psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "decode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "receive_to_render_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "dropped_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "frames_in_flight",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "rendered_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "max_skipped",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "target_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "qp_sl0",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "actual_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "capture_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "num_encoded_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "num_decoded_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "num_send_key_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "num_recv_key_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "recv_key_frame_size_bytes",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_bob",
+ .name = "recv_delta_frame_size_bytes",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+
+ // Charlie
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "ssim",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "transport_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "total_delay_incl_transport",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "time_between_rendered_frames",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "harmonic_framerate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "encode_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "encode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "time_between_freezes",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "freeze_time_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "pixels_per_frame",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "min_psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "decode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "receive_to_render_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "dropped_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "frames_in_flight",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "rendered_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "max_skipped",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "target_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "qp_sl0",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "actual_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "capture_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "num_encoded_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "num_decoded_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "num_send_key_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "num_recv_key_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "recv_key_frame_size_bytes",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video_alice_charlie",
+ .name = "recv_delta_frame_size_bytes",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter},
+ MetricValidationInfo{.test_case = "test_case",
+ .name = "cpu_usage_%",
+ .unit = Unit::kUnitless,
+ .improvement_direction =
+ ImprovementDirection::kSmallerIsBetter}));
+}
+
+TEST(DefaultVideoQualityAnalyzerMetricNamesTest,
+ TestCaseFor3PeerIsTheSameAfterAllPeersLeft) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock());
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ &metrics_logger, options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{"alice", "bob", "charlie"},
+ kAnalyzerMaxThreadsCount);
+
+ PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+ {"bob", "charlie"},
+ /*frames_count=*/5, *frame_generator,
+ /*interframe_delay_ms=*/50);
+ analyzer.UnregisterParticipantInCall("alice");
+ analyzer.UnregisterParticipantInCall("bob");
+ analyzer.UnregisterParticipantInCall("charlie");
+ analyzer.Stop();
+
+ std::vector<std::string> metrics =
+ ToTestCases(metrics_logger.GetCollectedMetrics());
+ EXPECT_THAT(metrics, SizeIs(57));
+ EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(28));
+ EXPECT_THAT(metrics,
+ Contains("test_case/alice_video_alice_charlie").Times(28));
+ EXPECT_THAT(metrics, Contains("test_case").Times(1));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc
new file mode 100644
index 0000000000..79b9286e2d
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc
@@ -0,0 +1,172 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+
+#include <algorithm>
+#include <iterator>
+#include <ostream>
+#include <string>
+
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+namespace {
+
+constexpr int kMicrosPerSecond = 1000000;
+
+} // namespace
+
+std::string StreamCodecInfo::ToString() const {
+ rtc::StringBuilder out;
+ out << "{codec_name=" << codec_name << "; first_frame_id=" << first_frame_id
+ << "; last_frame_id=" << last_frame_id
+ << "; switched_on_at=" << webrtc::ToString(switched_on_at)
+ << "; switched_from_at=" << webrtc::ToString(switched_from_at) << " }";
+ return out.str();
+}
+
+std::ostream& operator<<(std::ostream& os, const StreamCodecInfo& state) {
+ return os << state.ToString();
+}
+
+rtc::StringBuilder& operator<<(rtc::StringBuilder& sb,
+ const StreamCodecInfo& state) {
+ return sb << state.ToString();
+}
+
+bool operator==(const StreamCodecInfo& a, const StreamCodecInfo& b) {
+ return a.codec_name == b.codec_name && a.first_frame_id == b.first_frame_id &&
+ a.last_frame_id == b.last_frame_id &&
+ a.switched_on_at == b.switched_on_at &&
+ a.switched_from_at == b.switched_from_at;
+}
+
+std::string ToString(FrameDropPhase phase) {
+ switch (phase) {
+ case FrameDropPhase::kBeforeEncoder:
+ return "kBeforeEncoder";
+ case FrameDropPhase::kByEncoder:
+ return "kByEncoder";
+ case FrameDropPhase::kTransport:
+ return "kTransport";
+ case FrameDropPhase::kByDecoder:
+ return "kByDecoder";
+ case FrameDropPhase::kAfterDecoder:
+ return "kAfterDecoder";
+ case FrameDropPhase::kLastValue:
+ return "kLastValue";
+ }
+}
+
+std::ostream& operator<<(std::ostream& os, FrameDropPhase phase) {
+ return os << ToString(phase);
+}
+rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, FrameDropPhase phase) {
+ return sb << ToString(phase);
+}
+
+void SamplesRateCounter::AddEvent(Timestamp event_time) {
+ if (event_first_time_.IsMinusInfinity()) {
+ event_first_time_ = event_time;
+ }
+ event_last_time_ = event_time;
+ events_count_++;
+}
+
+double SamplesRateCounter::GetEventsPerSecond() const {
+ RTC_DCHECK(!IsEmpty());
+ // Divide on us and multiply on kMicrosPerSecond to correctly process cases
+ // where there were too small amount of events, so difference is less then 1
+ // sec. We can use us here, because Timestamp has us resolution.
+ return static_cast<double>(events_count_) /
+ (event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
+}
+
+StreamStats::StreamStats(Timestamp stream_started_time)
+ : stream_started_time(stream_started_time) {
+ for (int i = static_cast<int>(FrameDropPhase::kBeforeEncoder);
+ i < static_cast<int>(FrameDropPhase::kLastValue); ++i) {
+ dropped_by_phase.emplace(static_cast<FrameDropPhase>(i), 0);
+ }
+}
+
+std::string StatsKey::ToString() const {
+ rtc::StringBuilder out;
+ out << stream_label << "_" << receiver;
+ return out.str();
+}
+
+bool operator<(const StatsKey& a, const StatsKey& b) {
+ if (a.stream_label != b.stream_label) {
+ return a.stream_label < b.stream_label;
+ }
+ return a.receiver < b.receiver;
+}
+
+bool operator==(const StatsKey& a, const StatsKey& b) {
+ return a.stream_label == b.stream_label && a.receiver == b.receiver;
+}
+
+VideoStreamsInfo::VideoStreamsInfo(
+ std::map<std::string, std::string> stream_to_sender,
+ std::map<std::string, std::set<std::string>> sender_to_streams,
+ std::map<std::string, std::set<std::string>> stream_to_receivers)
+ : stream_to_sender_(std::move(stream_to_sender)),
+ sender_to_streams_(std::move(sender_to_streams)),
+ stream_to_receivers_(std::move(stream_to_receivers)) {}
+
+std::set<StatsKey> VideoStreamsInfo::GetStatsKeys() const {
+ std::set<StatsKey> out;
+ for (const std::string& stream_label : GetStreams()) {
+ for (const std::string& receiver : GetReceivers(stream_label)) {
+ out.insert(StatsKey(stream_label, receiver));
+ }
+ }
+ return out;
+}
+
+std::set<std::string> VideoStreamsInfo::GetStreams() const {
+ std::set<std::string> out;
+ std::transform(stream_to_sender_.begin(), stream_to_sender_.end(),
+ std::inserter(out, out.end()),
+ [](auto map_entry) { return map_entry.first; });
+ return out;
+}
+
+std::set<std::string> VideoStreamsInfo::GetStreams(
+ absl::string_view sender_name) const {
+ auto it = sender_to_streams_.find(std::string(sender_name));
+ if (it == sender_to_streams_.end()) {
+ return {};
+ }
+ return it->second;
+}
+
+absl::optional<std::string> VideoStreamsInfo::GetSender(
+ absl::string_view stream_label) const {
+ auto it = stream_to_sender_.find(std::string(stream_label));
+ if (it == stream_to_sender_.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
+std::set<std::string> VideoStreamsInfo::GetReceivers(
+ absl::string_view stream_label) const {
+ auto it = stream_to_receivers_.find(std::string(stream_label));
+ if (it == stream_to_receivers_.end()) {
+ return {};
+ }
+ return it->second;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h
new file mode 100644
index 0000000000..175f777b68
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h
@@ -0,0 +1,284 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <ostream>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+// WebRTC will request a key frame after 3 seconds if no frames were received.
+// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without
+// key frame request.
+constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;
+
+class SamplesRateCounter {
+ public:
+ void AddEvent(Timestamp event_time);
+
+ bool IsEmpty() const { return event_first_time_ == event_last_time_; }
+
+ double GetEventsPerSecond() const;
+
+ private:
+ Timestamp event_first_time_ = Timestamp::MinusInfinity();
+ Timestamp event_last_time_ = Timestamp::MinusInfinity();
+ int64_t events_count_ = 0;
+};
+
+struct FrameCounters {
+ // Count of frames, that were passed into WebRTC pipeline by video stream
+ // source.
+ int64_t captured = 0;
+ // Count of frames that reached video encoder.
+ int64_t pre_encoded = 0;
+ // Count of encoded images that were produced by encoder for all requested
+ // spatial layers and simulcast streams.
+ int64_t encoded = 0;
+ // Count of encoded images received in decoder for all requested spatial
+ // layers and simulcast streams.
+ int64_t received = 0;
+ // Count of frames that were produced by decoder.
+ int64_t decoded = 0;
+ // Count of frames that went out from WebRTC pipeline to video sink.
+ int64_t rendered = 0;
+ // Count of frames that were dropped in any point between capturing and
+ // rendering.
+ int64_t dropped = 0;
+ // Count of frames for which decoder returned error when they were sent for
+ // decoding.
+ int64_t failed_to_decode = 0;
+};
+
+// Contains information about the codec that was used for encoding or decoding
+// the stream.
+struct StreamCodecInfo {
+ // Codec implementation name.
+ std::string codec_name;
+ // Id of the first frame for which this codec was used.
+ uint16_t first_frame_id;
+ // Id of the last frame for which this codec was used.
+ uint16_t last_frame_id;
+ // Timestamp when the first frame was handled by the encode/decoder.
+ Timestamp switched_on_at = Timestamp::PlusInfinity();
+ // Timestamp when this codec was used last time.
+ Timestamp switched_from_at = Timestamp::PlusInfinity();
+
+ std::string ToString() const;
+};
+
+std::ostream& operator<<(std::ostream& os, const StreamCodecInfo& state);
+rtc::StringBuilder& operator<<(rtc::StringBuilder& sb,
+ const StreamCodecInfo& state);
+bool operator==(const StreamCodecInfo& a, const StreamCodecInfo& b);
+
+// Represents phases where video frame can be dropped and such drop will be
+// detected by analyzer.
+enum class FrameDropPhase : int {
+ kBeforeEncoder,
+ kByEncoder,
+ kTransport,
+ kByDecoder,
+ kAfterDecoder,
+ // kLastValue must be the last value in this enumeration.
+ kLastValue
+};
+
+std::string ToString(FrameDropPhase phase);
+std::ostream& operator<<(std::ostream& os, FrameDropPhase phase);
+rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, FrameDropPhase phase);
+
+struct StreamStats {
+ explicit StreamStats(Timestamp stream_started_time);
+
+ // The time when the first frame of this stream was captured.
+ Timestamp stream_started_time;
+
+ // Spatial quality metrics.
+ SamplesStatsCounter psnr;
+ SamplesStatsCounter ssim;
+
+ // Time from frame encoded (time point on exit from encoder) to the
+ // encoded image received in decoder (time point on entrance to decoder).
+ SamplesStatsCounter transport_time_ms;
+ // Time from frame was captured on device to time frame was displayed on
+ // device.
+ SamplesStatsCounter total_delay_incl_transport_ms;
+ // Time between frames out from renderer.
+ SamplesStatsCounter time_between_rendered_frames_ms;
+ SamplesRateCounter capture_frame_rate;
+ SamplesRateCounter encode_frame_rate;
+ SamplesStatsCounter encode_time_ms;
+ SamplesStatsCounter decode_time_ms;
+ // Time from last packet of frame is received until it's sent to the renderer.
+ SamplesStatsCounter receive_to_render_time_ms;
+ // Max frames skipped between two nearest.
+ SamplesStatsCounter skipped_between_rendered;
+ // In the next 2 metrics freeze is a pause that is longer, than maximum:
+ // 1. 150ms
+ // 2. 3 * average time between two sequential frames.
+ // Item 1 will cover high fps video and is a duration, that is noticeable by
+ // human eye. Item 2 will cover low fps video like screen sharing.
+ // Freeze duration.
+ SamplesStatsCounter freeze_time_ms;
+ // Mean time between one freeze end and next freeze start.
+ SamplesStatsCounter time_between_freezes_ms;
+ SamplesStatsCounter resolution_of_decoded_frame;
+ SamplesStatsCounter target_encode_bitrate;
+ // Sender side qp values per spatial layer. In case when spatial layer is not
+ // set for `webrtc::EncodedImage`, 0 is used as default.
+ std::map<int, SamplesStatsCounter> spatial_layers_qp;
+
+ int64_t total_encoded_images_payload = 0;
+ // Counters on which phase how many frames were dropped.
+ std::map<FrameDropPhase, int64_t> dropped_by_phase;
+
+ // Frame count metrics.
+ int64_t num_send_key_frames = 0;
+ int64_t num_recv_key_frames = 0;
+
+ // Encoded frame size (in bytes) metrics.
+ SamplesStatsCounter recv_key_frame_size_bytes;
+ SamplesStatsCounter recv_delta_frame_size_bytes;
+
+ // Vector of encoders used for this stream by sending client.
+ std::vector<StreamCodecInfo> encoders;
+ // Vectors of decoders used for this stream by receiving client.
+ std::vector<StreamCodecInfo> decoders;
+};
+
+struct AnalyzerStats {
+ // Size of analyzer internal comparisons queue, measured when new element
+ // id added to the queue.
+ SamplesStatsCounter comparisons_queue_size;
+ // Number of performed comparisons of 2 video frames from captured and
+ // rendered streams.
+ int64_t comparisons_done = 0;
+ // Number of cpu overloaded comparisons. Comparison is cpu overloaded if it is
+ // queued when there are too many not processed comparisons in the queue.
+ // Overloaded comparison doesn't include metrics like SSIM and PSNR that
+ // require heavy computations.
+ int64_t cpu_overloaded_comparisons_done = 0;
+ // Number of memory overloaded comparisons. Comparison is memory overloaded if
+ // it is queued when its captured frame was already removed due to high memory
+ // usage for that video stream.
+ int64_t memory_overloaded_comparisons_done = 0;
+ // Count of frames in flight in analyzer measured when new comparison is added
+ // and after analyzer was stopped.
+ SamplesStatsCounter frames_in_flight_left_count;
+
+ // Next metrics are collected and reported iff
+ // `DefaultVideoQualityAnalyzerOptions::report_infra_metrics` is true.
+ SamplesStatsCounter on_frame_captured_processing_time_ms;
+ SamplesStatsCounter on_frame_pre_encode_processing_time_ms;
+ SamplesStatsCounter on_frame_encoded_processing_time_ms;
+ SamplesStatsCounter on_frame_pre_decode_processing_time_ms;
+ SamplesStatsCounter on_frame_decoded_processing_time_ms;
+ SamplesStatsCounter on_frame_rendered_processing_time_ms;
+ SamplesStatsCounter on_decoder_error_processing_time_ms;
+};
+
+struct StatsKey {
+ StatsKey(std::string stream_label, std::string receiver)
+ : stream_label(std::move(stream_label)), receiver(std::move(receiver)) {}
+
+ std::string ToString() const;
+
+ // Label of video stream to which stats belongs to.
+ std::string stream_label;
+ // Name of the peer on which stream was received.
+ std::string receiver;
+};
+
+// Required to use StatsKey as std::map key.
+bool operator<(const StatsKey& a, const StatsKey& b);
+bool operator==(const StatsKey& a, const StatsKey& b);
+
+// Contains all metadata related to the video streams that were seen by the
+// video analyzer.
+class VideoStreamsInfo {
+ public:
+ std::set<StatsKey> GetStatsKeys() const;
+
+ // Returns all stream labels that are known to the video analyzer.
+ std::set<std::string> GetStreams() const;
+
+ // Returns set of the stream for specified `sender_name`. If sender didn't
+ // send any streams or `sender_name` isn't known to the video analyzer
+ // empty set will be returned.
+ std::set<std::string> GetStreams(absl::string_view sender_name) const;
+
+ // Returns sender name for specified `stream_label`. Returns `absl::nullopt`
+ // if provided `stream_label` isn't known to the video analyzer.
+ absl::optional<std::string> GetSender(absl::string_view stream_label) const;
+
+ // Returns set of the receivers for specified `stream_label`. If stream wasn't
+ // received by any peer or `stream_label` isn't known to the video analyzer
+ // empty set will be returned.
+ std::set<std::string> GetReceivers(absl::string_view stream_label) const;
+
+ protected:
+ friend class DefaultVideoQualityAnalyzer;
+ VideoStreamsInfo(
+ std::map<std::string, std::string> stream_to_sender,
+ std::map<std::string, std::set<std::string>> sender_to_streams,
+ std::map<std::string, std::set<std::string>> stream_to_receivers);
+
+ private:
+ std::map<std::string, std::string> stream_to_sender_;
+ std::map<std::string, std::set<std::string>> sender_to_streams_;
+ std::map<std::string, std::set<std::string>> stream_to_receivers_;
+};
+
+struct DefaultVideoQualityAnalyzerOptions {
+ // Tells DefaultVideoQualityAnalyzer if heavy metrics have to be computed.
+ bool compute_psnr = true;
+ bool compute_ssim = true;
+ // If true, weights the luma plane more than the chroma planes in the PSNR.
+ bool use_weighted_psnr = false;
+ // Tells DefaultVideoQualityAnalyzer if detailed frame stats should be
+ // reported.
+ bool report_detailed_frame_stats = false;
+ // Tells DefaultVideoQualityAnalyzer if infra metrics related to the
+ // performance and stability of the analyzer itself should be reported.
+ bool report_infra_metrics = false;
+ // If true DefaultVideoQualityAnalyzer will try to adjust frames before
+ // computing PSNR and SSIM for them. In some cases picture may be shifted by
+ // a few pixels after the encode/decode step. Those difference is invisible
+ // for a human eye, but it affects the metrics. So the adjustment is used to
+ // get metrics that are closer to how human perceive the video. This feature
+ // significantly slows down the comparison, so turn it on only when it is
+ // needed.
+ bool adjust_cropping_before_comparing_frames = false;
+ // Amount of frames that are queued in the DefaultVideoQualityAnalyzer from
+ // the point they were captured to the point they were rendered on all
+ // receivers per stream.
+ size_t max_frames_in_flight_per_stream_count =
+ kDefaultMaxFramesInFlightPerStream;
+ // If true, the analyzer will expect peers to receive their own video streams.
+ bool enable_receive_own_stream = false;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc
new file mode 100644
index 0000000000..d59ef12c63
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
+
+#include <map>
+#include <set>
+
+#include "absl/types/optional.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+template <typename T>
+absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
+ auto it = map.find(key);
+ if (it == map.end()) {
+ return absl::nullopt;
+ }
+ return it->second;
+}
+
+} // namespace
+
+StreamState::StreamState(size_t sender,
+ std::set<size_t> receivers,
+ Timestamp stream_started_time)
+ : sender_(sender),
+ stream_started_time_(stream_started_time),
+ receivers_(receivers),
+ frame_ids_(std::move(receivers)) {
+ frame_ids_.AddReader(kAliveFramesQueueIndex);
+ RTC_CHECK_NE(sender_, kAliveFramesQueueIndex);
+ for (size_t receiver : receivers_) {
+ RTC_CHECK_NE(receiver, kAliveFramesQueueIndex);
+ }
+}
+
+uint16_t StreamState::PopFront(size_t peer) {
+ RTC_CHECK_NE(peer, kAliveFramesQueueIndex);
+ absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer);
+ RTC_DCHECK(frame_id.has_value());
+
+ // If alive's frame queue is longer than all others, than also pop frame from
+ // it, because that frame is received by all receivers.
+ size_t alive_size = frame_ids_.size(kAliveFramesQueueIndex);
+ size_t other_size = GetLongestReceiverQueue();
+ // Pops frame from alive queue if alive's queue is the longest one.
+ if (alive_size > other_size) {
+ absl::optional<uint16_t> alive_frame_id =
+ frame_ids_.PopFront(kAliveFramesQueueIndex);
+ RTC_DCHECK(alive_frame_id.has_value());
+ RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
+ }
+
+ return frame_id.value();
+}
+
+void StreamState::AddPeer(size_t peer) {
+ RTC_CHECK_NE(peer, kAliveFramesQueueIndex);
+ frame_ids_.AddReader(peer, kAliveFramesQueueIndex);
+ receivers_.insert(peer);
+}
+
+void StreamState::RemovePeer(size_t peer) {
+ RTC_CHECK_NE(peer, kAliveFramesQueueIndex);
+ frame_ids_.RemoveReader(peer);
+ receivers_.erase(peer);
+
+ // If we removed the last receiver for the alive frames, we need to pop them
+ // from the queue, because now they received by all receivers.
+ size_t alive_size = frame_ids_.size(kAliveFramesQueueIndex);
+ size_t other_size = GetLongestReceiverQueue();
+ while (alive_size > other_size) {
+ frame_ids_.PopFront(kAliveFramesQueueIndex);
+ alive_size--;
+ }
+}
+
+uint16_t StreamState::MarkNextAliveFrameAsDead() {
+ absl::optional<uint16_t> frame_id =
+ frame_ids_.PopFront(kAliveFramesQueueIndex);
+ RTC_DCHECK(frame_id.has_value());
+ return frame_id.value();
+}
+
+void StreamState::SetLastRenderedFrameTime(size_t peer, Timestamp time) {
+ auto it = last_rendered_frame_time_.find(peer);
+ if (it == last_rendered_frame_time_.end()) {
+ last_rendered_frame_time_.insert({peer, time});
+ } else {
+ it->second = time;
+ }
+}
+
+absl::optional<Timestamp> StreamState::last_rendered_frame_time(
+ size_t peer) const {
+ return MaybeGetValue(last_rendered_frame_time_, peer);
+}
+
+size_t StreamState::GetLongestReceiverQueue() const {
+ size_t max = 0;
+ for (size_t receiver : receivers_) {
+ size_t cur_size = frame_ids_.size(receiver);
+ if (cur_size > max) {
+ max = cur_size;
+ }
+ }
+ return max;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h
new file mode 100644
index 0000000000..829a79c7bf
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
+
+#include <limits>
+#include <map>
+#include <set>
+
+#include "absl/types/optional.h"
+#include "api/units/timestamp.h"
+#include "test/pc/e2e/analyzer/video/multi_reader_queue.h"
+
+namespace webrtc {
+
+// Represents a current state of video stream inside
+// DefaultVideoQualityAnalyzer.
+//
+// Maintains the sequence of frames for each video stream and keeps track about
+// which frames were seen by each of the possible stream receiver.
+//
+// Keeps information about which frames are alive and which are dead. Frame is
+// alive if it contains VideoFrame payload for corresponding FrameInFlight
+// object inside DefaultVideoQualityAnalyzer, otherwise frame is considered
+// dead.
+//
+// Supports peer indexes from 0 to max(size_t) - 1.
+class StreamState {
+ public:
+ StreamState(size_t sender,
+ std::set<size_t> receivers,
+ Timestamp stream_started_time);
+
+ size_t sender() const { return sender_; }
+ Timestamp stream_started_time() const { return stream_started_time_; }
+
+ void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
+ // Crash if state is empty.
+ uint16_t PopFront(size_t peer);
+ bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); }
+ // Crash if state is empty.
+ uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); }
+
+ // Adds a new peer to the state. All currently alive frames will be expected
+ // to be received by the newly added peer.
+ void AddPeer(size_t peer);
+
+ // Removes peer from the state. Frames that were expected to be received by
+ // this peer will be removed from it. On the other hand last rendered frame
+ // time for the removed peer will be preserved, because
+ // DefaultVideoQualityAnalyzer still may request it for stats processing.
+ void RemovePeer(size_t peer);
+
+ size_t GetAliveFramesCount() const {
+ return frame_ids_.size(kAliveFramesQueueIndex);
+ }
+ uint16_t MarkNextAliveFrameAsDead();
+
+ void SetLastRenderedFrameTime(size_t peer, Timestamp time);
+ absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;
+
+ private:
+ // Index of the `frame_ids_` queue which is used to track alive frames for
+ // this stream.
+ static constexpr size_t kAliveFramesQueueIndex =
+ std::numeric_limits<size_t>::max();
+
+ size_t GetLongestReceiverQueue() const;
+
+ // Index of the owner. Owner's queue in `frame_ids_` will keep alive frames.
+ const size_t sender_;
+ const Timestamp stream_started_time_;
+ std::set<size_t> receivers_;
+ // To correctly determine dropped frames we have to know sequence of frames
+ // in each stream so we will keep a list of frame ids inside the stream.
+ // This list is represented by multi head queue of frame ids with separate
+ // head for each receiver. When the frame is rendered, we will pop ids from
+ // the corresponding head until id will match with rendered one. All ids
+ // before matched one can be considered as dropped:
+ //
+ // | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
+ //
+ // If we received frame with id frame_id3, then we will pop frame_id1 and
+ // frame_id2 and consider those frames as dropped and then compare received
+ // frame with the one from `FrameInFlight` with id frame_id3.
+ MultiReaderQueue<uint16_t> frame_ids_;
+ std::map<size_t, Timestamp> last_rendered_frame_time_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc
new file mode 100644
index 0000000000..01a6aab28a
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
+
+#include <set>
+
+#include "api/units/timestamp.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+TEST(StreamStateTest, PopFrontAndFrontIndependentForEachPeer) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ EXPECT_EQ(state.Front(/*peer=*/1), 1);
+ EXPECT_EQ(state.PopFront(/*peer=*/1), 1);
+ EXPECT_EQ(state.Front(/*peer=*/1), 2);
+ EXPECT_EQ(state.PopFront(/*peer=*/1), 2);
+ EXPECT_EQ(state.Front(/*peer=*/2), 1);
+ EXPECT_EQ(state.PopFront(/*peer=*/2), 1);
+ EXPECT_EQ(state.Front(/*peer=*/2), 2);
+ EXPECT_EQ(state.PopFront(/*peer=*/2), 2);
+}
+
+TEST(StreamStateTest, IsEmpty) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+
+ EXPECT_FALSE(state.IsEmpty(/*peer=*/1));
+
+ state.PopFront(/*peer=*/1);
+
+ EXPECT_TRUE(state.IsEmpty(/*peer=*/1));
+}
+
+TEST(StreamStateTest, PopFrontForOnlyOnePeerDontChangeAliveFramesCount) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 2lu);
+
+ state.PopFront(/*peer=*/1);
+ state.PopFront(/*peer=*/1);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 2lu);
+}
+
+TEST(StreamStateTest, PopFrontForAllPeersReducesAliveFramesCount) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 2lu);
+
+ state.PopFront(/*peer=*/1);
+ state.PopFront(/*peer=*/2);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 1lu);
+}
+
+TEST(StreamStateTest, RemovePeerForLastExpectedReceiverUpdatesAliveFrames) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ state.PopFront(/*peer=*/1);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 2lu);
+
+ state.RemovePeer(/*peer=*/2);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 1lu);
+}
+
+TEST(StreamStateTest, MarkNextAliveFrameAsDeadDecreseAliveFramesCount) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 2lu);
+
+ state.MarkNextAliveFrameAsDead();
+
+ EXPECT_EQ(state.GetAliveFramesCount(), 1lu);
+}
+
+TEST(StreamStateTest, MarkNextAliveFrameAsDeadDoesntAffectFrontFrameForPeer) {
+ StreamState state(/*sender=*/0,
+ /*receivers=*/std::set<size_t>{1, 2},
+ Timestamp::Seconds(1));
+ state.PushBack(/*frame_id=*/1);
+ state.PushBack(/*frame_id=*/2);
+
+ EXPECT_EQ(state.Front(/*peer=*/1), 1);
+
+ state.MarkNextAliveFrameAsDead();
+
+ EXPECT_EQ(state.Front(/*peer=*/1), 1);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
new file mode 100644
index 0000000000..fc970e1ea2
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc
@@ -0,0 +1,2204 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "api/rtp_packet_info.h"
+#include "api/rtp_packet_infos.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/video/encoded_image.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::TestWithParam;
+using ::testing::ValuesIn;
+
+using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample;
+
+constexpr int kAnalyzerMaxThreadsCount = 1;
+constexpr int kMaxFramesInFlightPerStream = 10;
+constexpr int kFrameWidth = 320;
+constexpr int kFrameHeight = 240;
+constexpr double kMaxSsim = 1;
+constexpr char kStreamLabel[] = "video-stream";
+constexpr char kSenderPeerName[] = "alice";
+constexpr char kReceiverPeerName[] = "bob";
+
+DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() {
+ DefaultVideoQualityAnalyzerOptions options;
+ options.compute_psnr = false;
+ options.compute_ssim = false;
+ options.adjust_cropping_before_comparing_frames = false;
+ options.max_frames_in_flight_per_stream_count = kMaxFramesInFlightPerStream;
+ return options;
+}
+
+VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
+ int64_t timestamp_us) {
+ test::FrameGeneratorInterface::VideoFrameData frame_data =
+ frame_generator->NextFrame();
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .set_timestamp_us(timestamp_us)
+ .build();
+}
+
+EncodedImage FakeEncode(const VideoFrame& frame) {
+ EncodedImage image;
+ std::vector<RtpPacketInfo> packet_infos;
+ packet_infos.push_back(RtpPacketInfo(
+ /*ssrc=*/1,
+ /*csrcs=*/{},
+ /*rtp_timestamp=*/frame.timestamp(),
+ /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000)));
+ image.SetPacketInfos(RtpPacketInfos(packet_infos));
+ return image;
+}
+
+VideoFrame DeepCopy(const VideoFrame& frame) {
+ VideoFrame copy = frame;
+ copy.set_video_frame_buffer(
+ I420Buffer::Copy(*frame.video_frame_buffer()->ToI420()));
+ return copy;
+}
+
+std::vector<StatsSample> GetSortedSamples(const SamplesStatsCounter& counter) {
+ rtc::ArrayView<const StatsSample> view = counter.GetTimedSamples();
+ std::vector<StatsSample> out(view.begin(), view.end());
+ std::sort(out.begin(), out.end(),
+ [](const StatsSample& a, const StatsSample& b) {
+ return a.time < b.time;
+ });
+ return out;
+}
+
+std::string ToString(const std::vector<StatsSample>& values) {
+ rtc::StringBuilder out;
+ for (const auto& v : values) {
+ out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, ";
+ }
+ return out.str();
+}
+
+void FakeCPULoad() {
+ std::vector<int> temp(1000000);
+ for (size_t i = 0; i < temp.size(); ++i) {
+ temp[i] = rand();
+ }
+ std::sort(temp.begin(), temp.end());
+ ASSERT_TRUE(std::is_sorted(temp.begin(), temp.end()));
+}
+
+void PassFramesThroughAnalyzer(DefaultVideoQualityAnalyzer& analyzer,
+ absl::string_view sender,
+ absl::string_view stream_label,
+ std::vector<absl::string_view> receivers,
+ int frames_count,
+ test::FrameGeneratorInterface& frame_generator,
+ int interframe_delay_ms = 0) {
+ for (int i = 0; i < frames_count; ++i) {
+ VideoFrame frame = NextFrame(&frame_generator, /*timestamp_us=*/1);
+ uint16_t frame_id =
+ analyzer.OnFrameCaptured(sender, std::string(stream_label), frame);
+ frame.set_id(frame_id);
+ analyzer.OnFramePreEncode(sender, frame);
+ analyzer.OnFrameEncoded(sender, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ for (absl::string_view receiver : receivers) {
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(receiver, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(receiver, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(receiver, received_frame);
+ }
+ if (i < frames_count - 1 && interframe_delay_ms > 0) {
+ SleepMs(interframe_delay_ms);
+ }
+ }
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+ MemoryOverloadedAndThenAllFramesReceived) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done,
+ kMaxFramesInFlightPerStream);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 2);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+ FillMaxMemoryReceiveAllMemoryOverloadedAndThenAllFramesReceived) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+  // Fill analyzer's memory up to the limit
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+ frames_order.clear();
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+
+  // Overload the analyzer's memory beyond its limit
+ for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Receive all frames.
+ for (const uint16_t& frame_id : frames_order) {
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done,
+ kMaxFramesInFlightPerStream);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 3);
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 3);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+ MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ for (size_t i = kMaxFramesInFlightPerStream; i < frames_order.size(); ++i) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
+
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
+ EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream / 2);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream / 2);
+ EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, captured_frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, 1);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kAlice, frame);
+ SleepMs(20);
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ SleepMs(50);
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(30);
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(10);
+ analyzer.OnFrameRendered(kBob, received_frame);
+ }
+
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kCharlie, received_frame.id(),
+ FakeEncode(received_frame));
+ SleepMs(40);
+ analyzer.OnFrameDecoded(kCharlie, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ SleepMs(5);
+ analyzer.OnFrameRendered(kCharlie, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(analyzer_stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
+ EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
+
+ VideoStreamsInfo streams_info = analyzer.GetKnownStreams();
+ EXPECT_EQ(streams_info.GetStreams(), std::set<std::string>{kStreamLabel});
+ EXPECT_EQ(streams_info.GetStreams(kAlice),
+ std::set<std::string>{kStreamLabel});
+ EXPECT_EQ(streams_info.GetSender(kStreamLabel), kAlice);
+ EXPECT_EQ(streams_info.GetReceivers(kStreamLabel),
+ (std::set<std::string>{kBob, kCharlie}));
+
+ EXPECT_EQ(streams_info.GetStatsKeys().size(), 2lu);
+ for (auto stream_key : streams_info.GetStatsKeys()) {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(stream_key);
+    // On some devices the pipeline can be too slow, so we actually can't
+    // force real constraints here. Let's just check that at least 1 frame
+    // passed through the whole pipeline.
+ EXPECT_GE(stream_conters.captured, 10);
+ EXPECT_GE(stream_conters.pre_encoded, 10);
+ EXPECT_GE(stream_conters.encoded, 10);
+ EXPECT_GE(stream_conters.received, 5);
+ EXPECT_GE(stream_conters.decoded, 5);
+ EXPECT_GE(stream_conters.rendered, 5);
+ EXPECT_GE(stream_conters.dropped, 5);
+ }
+
+ std::map<StatsKey, StreamStats> stats = analyzer.GetStats();
+ const StatsKey kAliceBobStats(kStreamLabel, kBob);
+ const StatsKey kAliceCharlieStats(kStreamLabel, kCharlie);
+ EXPECT_EQ(stats.size(), 2lu);
+ {
+ auto it = stats.find(kAliceBobStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_decoded_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_decoded_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_decoded_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+ {
+ auto it = stats.find(kAliceCharlieStats);
+ EXPECT_FALSE(it == stats.end());
+ ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
+ ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
+ EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
+ ASSERT_FALSE(it->second.resolution_of_decoded_frame.IsEmpty());
+ EXPECT_GE(it->second.resolution_of_decoded_frame.GetMin(),
+ kFrameWidth * kFrameHeight - 1);
+ EXPECT_LE(it->second.resolution_of_decoded_frame.GetMax(),
+ kFrameWidth * kFrameHeight + 1);
+ }
+}
+
+// Test the case which can happen when SFU is switching from one layer to
+// another, so the same frame can be received twice by the same peer.
+TEST(DefaultVideoQualityAnalyzerTest,
+ OneFrameReceivedTwiceBySamePeerWith2Receivers) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
+ captured_frame.set_id(
+ analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame));
+ analyzer.OnFramePreEncode(kAlice, captured_frame);
+ analyzer.OnFrameEncoded(kAlice, captured_frame.id(),
+ FakeEncode(captured_frame),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+ VideoFrame received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+ received_frame = DeepCopy(captured_frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+
+  // Give analyzer some time to process frames on async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if it
+  // doesn't fit in 100ms it means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ // We have 2 comparisons here because 1 for the frame received by Bob and
+ // 1 for the frame in flight from Alice to Charlie.
+ EXPECT_EQ(stats.comparisons_done, 2);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.received, 1);
+ EXPECT_EQ(frame_counters.decoded, 1);
+ EXPECT_EQ(frame_counters.rendered, 1);
+ EXPECT_EQ(frame_counters.dropped, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromEqualFrames) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions analyzer_options;
+ analyzer_options.compute_psnr = true;
+ analyzer_options.compute_ssim = true;
+ analyzer_options.adjust_cropping_before_comparing_frames = false;
+ analyzer_options.max_frames_in_flight_per_stream_count =
+ kMaxFramesInFlightPerStream;
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ analyzer_options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+  // Give analyzer some time to process frames on async thread. Heavy metrics
+  // computation is turned on, so giving some extra time to be sure that the
+  // computation has ended.
+ SleepMs(500);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
+
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ std::map<StatsKey, StreamStats> stream_stats = analyzer.GetStats();
+ const StatsKey kAliceBobStats(kStreamLabel, kReceiverPeerName);
+ EXPECT_EQ(stream_stats.size(), 1lu);
+
+ auto it = stream_stats.find(kAliceBobStats);
+ EXPECT_GE(it->second.psnr.GetMin(), kPerfectPSNR);
+ EXPECT_GE(it->second.ssim.GetMin(), kMaxSsim);
+}
+
+// Verifies that PSNR/SSIM stay perfect for frames shifted by a few pixels
+// when `adjust_cropping_before_comparing_frames` is enabled: the analyzer is
+// expected to compensate the shift before comparing captured and rendered
+// frames.
+TEST(DefaultVideoQualityAnalyzerTest,
+ HeavyQualityMetricsFromShiftedFramesWithAdjustment) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions analyzer_options;
+ analyzer_options.compute_psnr = true;
+ analyzer_options.compute_ssim = true;
+ analyzer_options.adjust_cropping_before_comparing_frames = true;
+ analyzer_options.max_frames_in_flight_per_stream_count =
+ kMaxFramesInFlightPerStream;
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ analyzer_options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+
+ VideoFrame received_frame = frame;
+ // Shift frame by a few pixels.
+ test::CropRegion crop_region{0, 1, 3, 0};
+ rtc::scoped_refptr<VideoFrameBuffer> cropped_buffer =
+ CropAndZoom(crop_region, received_frame.video_frame_buffer()->ToI420());
+ received_frame.set_video_frame_buffer(cropped_buffer);
+
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. Heavy metrics
+ // computation is turned on, so giving some extra time to be sure that
+ // computations have ended.
+ SleepMs(500);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
+
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ std::map<StatsKey, StreamStats> stream_stats = analyzer.GetStats();
+ const StatsKey kAliceBobStats(kStreamLabel, kReceiverPeerName);
+ EXPECT_EQ(stream_stats.size(), 1lu);
+
+ auto it = stream_stats.find(kAliceBobStats);
+ // Fail fatally if the expected stream is missing instead of dereferencing
+ // end() below (EXPECT_EQ on the size above is non-fatal).
+ ASSERT_NE(it, stream_stats.end());
+ EXPECT_GE(it->second.psnr.GetMin(), kPerfectPSNR);
+ EXPECT_GE(it->second.ssim.GetMin(), kMaxSsim);
+}
+
+// Verifies that the analyzer reports a non-zero CPU usage after processing
+// frames and that the reported value is frozen once Stop() was called: a
+// second Stop() must not change it.
+TEST(DefaultVideoQualityAnalyzerTest, CpuUsage) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Windows CPU clock has low accuracy. We need to fake some additional load to
+ // be sure that the clock ticks (https://crbug.com/webrtc/12249).
+ FakeCPULoad();
+
+ // Only every second frame is received so that the analyzer also has dropped
+ // frames to account for.
+ for (size_t i = 1; i < frames_order.size(); i += 2) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ double cpu_usage = analyzer.GetCpuUsagePercent();
+ ASSERT_GT(cpu_usage, 0);
+
+ // Calling Stop() again after more time has passed must be a no-op for the
+ // CPU usage metric.
+ SleepMs(100);
+ analyzer.Stop();
+
+ EXPECT_EQ(analyzer.GetCpuUsagePercent(), cpu_usage);
+}
+
+// Verifies that peers registered via RegisterParticipantInCall() after the
+// analyzer was started are tracked correctly: per-stream counters for the
+// late joiners (Charlie, Katie) only account for frames from the moment they
+// could receive, and frames they never rendered are counted as dropped.
+TEST(DefaultVideoQualityAnalyzerTest, RuntimeParticipantsAdding) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+ constexpr char kKatie[] = "katie";
+
+ constexpr int kFramesCount = 9;
+ constexpr int kOneThirdFrames = kFramesCount / 3;
+ constexpr int kTwoThirdFrames = 2 * kOneThirdFrames;
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ // Started with an empty peer list; all peers are added at runtime below.
+ analyzer.Start("test_case", {}, kAnalyzerMaxThreadsCount);
+
+ std::map<uint16_t, VideoFrame> captured_frames;
+ std::vector<uint16_t> frames_order;
+ analyzer.RegisterParticipantInCall(kAlice);
+ analyzer.RegisterParticipantInCall(kBob);
+
+ // Alice is sending frames.
+ for (int i = 0; i < kFramesCount; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), i);
+ frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
+ frames_order.push_back(frame.id());
+ captured_frames.insert({frame.id(), frame});
+ analyzer.OnFramePreEncode(kAlice, frame);
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Bob receives one third of the sent frames.
+ for (int i = 0; i < kOneThirdFrames; ++i) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+ }
+
+ analyzer.RegisterParticipantInCall(kCharlie);
+ analyzer.RegisterParticipantInCall(kKatie);
+
+ // New participants were dynamically added. Bob and Charlie receive second
+ // third of the sent frames. Katie drops the frames.
+ for (int i = kOneThirdFrames; i < kTwoThirdFrames; ++i) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame bob_received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, bob_received_frame.id(),
+ FakeEncode(bob_received_frame));
+ analyzer.OnFrameDecoded(kBob, bob_received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, bob_received_frame);
+
+ VideoFrame charlie_received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kCharlie, charlie_received_frame.id(),
+ FakeEncode(charlie_received_frame));
+ analyzer.OnFrameDecoded(kCharlie, charlie_received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kCharlie, charlie_received_frame);
+ }
+
+ // Bob, Charlie and Katie receive the rest of the sent frames.
+ for (int i = kTwoThirdFrames; i < kFramesCount; ++i) {
+ uint16_t frame_id = frames_order.at(i);
+ VideoFrame bob_received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kBob, bob_received_frame.id(),
+ FakeEncode(bob_received_frame));
+ analyzer.OnFrameDecoded(kBob, bob_received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, bob_received_frame);
+
+ VideoFrame charlie_received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kCharlie, charlie_received_frame.id(),
+ FakeEncode(charlie_received_frame));
+ analyzer.OnFrameDecoded(kCharlie, charlie_received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kCharlie, charlie_received_frame);
+
+ VideoFrame katie_received_frame = DeepCopy(captured_frames.at(frame_id));
+ analyzer.OnFramePreDecode(kKatie, katie_received_frame.id(),
+ FakeEncode(katie_received_frame));
+ analyzer.OnFrameDecoded(kKatie, katie_received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kKatie, katie_received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
+ EXPECT_EQ(stats.comparisons_done, kFramesCount + 2 * kTwoThirdFrames);
+
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, kFramesCount);
+ EXPECT_EQ(frame_counters.received, 2 * kFramesCount);
+ EXPECT_EQ(frame_counters.decoded, 2 * kFramesCount);
+ EXPECT_EQ(frame_counters.rendered, 2 * kFramesCount);
+ EXPECT_EQ(frame_counters.dropped, kOneThirdFrames);
+
+ const StatsKey kAliceBobStats(kStreamLabel, kBob);
+ const StatsKey kAliceCharlieStats(kStreamLabel, kCharlie);
+ const StatsKey kAliceKatieStats(kStreamLabel, kKatie);
+ EXPECT_EQ(analyzer.GetKnownStreams().GetStatsKeys(),
+ (std::set<StatsKey>{kAliceBobStats, kAliceCharlieStats,
+ kAliceKatieStats}));
+ {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(kAliceBobStats);
+ EXPECT_EQ(stream_conters.captured, kFramesCount);
+ EXPECT_EQ(stream_conters.pre_encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.received, kFramesCount);
+ EXPECT_EQ(stream_conters.decoded, kFramesCount);
+ EXPECT_EQ(stream_conters.rendered, kFramesCount);
+ }
+ {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(kAliceCharlieStats);
+ EXPECT_EQ(stream_conters.captured, kFramesCount);
+ EXPECT_EQ(stream_conters.pre_encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.received, kTwoThirdFrames);
+ EXPECT_EQ(stream_conters.decoded, kTwoThirdFrames);
+ EXPECT_EQ(stream_conters.rendered, kTwoThirdFrames);
+ }
+ {
+ FrameCounters stream_conters =
+ analyzer.GetPerStreamCounters().at(kAliceKatieStats);
+ EXPECT_EQ(stream_conters.captured, kFramesCount);
+ EXPECT_EQ(stream_conters.pre_encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.encoded, kFramesCount);
+ EXPECT_EQ(stream_conters.received, kOneThirdFrames);
+ EXPECT_EQ(stream_conters.decoded, kOneThirdFrames);
+ EXPECT_EQ(stream_conters.rendered, kOneThirdFrames);
+ }
+}
+
+// Verifies that an OnFrameEncoded() for a later simulcast layer, arriving
+// after every receiver already rendered the frame, doesn't leave the frame
+// in flight and doesn't create extra comparisons (still 2 comparisons for
+// 1 captured / 2 rendered frames).
+TEST(DefaultVideoQualityAnalyzerTest,
+ SimulcastFrameWasFullyReceivedByAllPeersBeforeEncodeFinish) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ constexpr char kAlice[] = "alice";
+ constexpr char kBob[] = "bob";
+ constexpr char kCharlie[] = "charlie";
+ analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
+ kAnalyzerMaxThreadsCount);
+
+ VideoFrame frame = NextFrame(frame_generator.get(), 1);
+
+ frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
+ analyzer.OnFramePreEncode(kAlice, frame);
+ // Encode 1st simulcast layer
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+ // Receive by Bob
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kBob, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kBob, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kBob, received_frame);
+ // Receive by Charlie
+ received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kCharlie, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kCharlie, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kCharlie, received_frame);
+
+ // Encode 2nd simulcast layer after all peers already rendered the frame.
+ analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(stats.comparisons_done, 2);
+
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 1);
+ EXPECT_EQ(frame_counters.rendered, 2);
+}
+
+// Verifies that with `enable_receive_own_stream` a frame stays in flight
+// until both the remote receiver and the sender itself rendered it. In this
+// scenario the remote receiver receives first, the sender second.
+TEST(DefaultVideoQualityAnalyzerTest,
+ FrameCanBeReceivedBySenderAfterItWasReceivedByReceiver) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ options.enable_receive_own_stream = true;
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(), options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::vector<VideoFrame> frames;
+ for (int i = 0; i < 3; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), 1);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames.push_back(frame);
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Receive by 2nd peer.
+ for (VideoFrame& frame : frames) {
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Check that we still have that frame in flight.
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 3)
+ << "Expected that frame is still in flight, "
+ << "because it wasn't received by sender"
+ << ToString(frames_in_flight_sizes);
+
+ // Receive by sender
+ for (VideoFrame& frame : frames) {
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kSenderPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kSenderPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kSenderPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.comparisons_done, 6);
+
+ frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 3);
+ EXPECT_EQ(frame_counters.rendered, 6);
+
+ EXPECT_EQ(analyzer.GetStats().size(), 2lu);
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kReceiverPeerName));
+ EXPECT_EQ(stream_conters.captured, 3);
+ EXPECT_EQ(stream_conters.pre_encoded, 3);
+ EXPECT_EQ(stream_conters.encoded, 3);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 3);
+ EXPECT_EQ(stream_conters.rendered, 3);
+ }
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kSenderPeerName));
+ EXPECT_EQ(stream_conters.captured, 3);
+ EXPECT_EQ(stream_conters.pre_encoded, 3);
+ EXPECT_EQ(stream_conters.encoded, 3);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 3);
+ EXPECT_EQ(stream_conters.rendered, 3);
+ }
+}
+
+// Verifies that with `enable_receive_own_stream` a frame stays in flight
+// until both the remote receiver and the sender itself rendered it. In this
+// scenario the sender receives its own stream first, the remote receiver
+// second.
+TEST(DefaultVideoQualityAnalyzerTest,
+ FrameCanBeReceivedByReceiverAfterItWasReceivedBySender) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ options.enable_receive_own_stream = true;
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(), options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ std::vector<VideoFrame> frames;
+ for (int i = 0; i < 3; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), 1);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames.push_back(frame);
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+
+ // Receive by sender
+ for (VideoFrame& frame : frames) {
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kSenderPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kSenderPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kSenderPeerName, received_frame);
+ }
+
+ // Check that we still have that frame in flight.
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ // NOTE: in this direction the frames were already received by the sender;
+ // they are still in flight because the receiver hasn't got them yet.
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 3)
+ << "Expected that frame is still in flight, "
+ << "because it wasn't received by receiver"
+ << ToString(frames_in_flight_sizes);
+
+ // Receive by 2nd peer.
+ for (VideoFrame& frame : frames) {
+ VideoFrame received_frame = DeepCopy(frame);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.comparisons_done, 6);
+
+ frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 3);
+ EXPECT_EQ(frame_counters.rendered, 6);
+
+ EXPECT_EQ(analyzer.GetStats().size(), 2lu);
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kReceiverPeerName));
+ EXPECT_EQ(stream_conters.captured, 3);
+ EXPECT_EQ(stream_conters.pre_encoded, 3);
+ EXPECT_EQ(stream_conters.encoded, 3);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 3);
+ EXPECT_EQ(stream_conters.rendered, 3);
+ }
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kSenderPeerName));
+ EXPECT_EQ(stream_conters.captured, 3);
+ EXPECT_EQ(stream_conters.pre_encoded, 3);
+ EXPECT_EQ(stream_conters.encoded, 3);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 3);
+ EXPECT_EQ(stream_conters.rendered, 3);
+ }
+}
+
+// Verifies that encoder/decoder names reported through EncoderStats and
+// DecoderStats are tracked per stream, with the correct first/last frame id
+// range for each codec when the codec changes mid-stream.
+TEST(DefaultVideoQualityAnalyzerTest, CodecTrackedCorrectly) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(),
+ AnalyzerOptionsForTest());
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ VideoQualityAnalyzerInterface::EncoderStats encoder_stats;
+ std::vector<std::string> codec_names = {"codec_1", "codec_2"};
+ std::vector<VideoFrame> frames;
+ // Send 3 frames for each codec.
+ for (size_t i = 0; i < codec_names.size(); ++i) {
+ for (size_t j = 0; j < 3; ++j) {
+ VideoFrame frame = NextFrame(frame_generator.get(), 3 * i + j);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ analyzer.OnFramePreEncode(kSenderPeerName, frame);
+ encoder_stats.encoder_name = codec_names[i];
+ analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
+ encoder_stats, false);
+ frames.push_back(std::move(frame));
+ }
+ }
+
+ // Receive 3 frames for each codec.
+ VideoQualityAnalyzerInterface::DecoderStats decoder_stats;
+ for (size_t i = 0; i < codec_names.size(); ++i) {
+ for (size_t j = 0; j < 3; ++j) {
+ VideoFrame received_frame = DeepCopy(frames[3 * i + j]);
+ analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
+ FakeEncode(received_frame));
+ decoder_stats.decoder_name = codec_names[i];
+ analyzer.OnFrameDecoded(kReceiverPeerName, received_frame, decoder_stats);
+ analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
+ }
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ std::map<StatsKey, StreamStats> stats = analyzer.GetStats();
+ ASSERT_EQ(stats.size(), 1lu);
+ const StreamStats& stream_stats =
+ stats.at(StatsKey(kStreamLabel, kReceiverPeerName));
+ ASSERT_EQ(stream_stats.encoders.size(), 2lu);
+ EXPECT_EQ(stream_stats.encoders[0].codec_name, codec_names[0]);
+ EXPECT_EQ(stream_stats.encoders[0].first_frame_id, frames[0].id());
+ EXPECT_EQ(stream_stats.encoders[0].last_frame_id, frames[2].id());
+ EXPECT_EQ(stream_stats.encoders[1].codec_name, codec_names[1]);
+ EXPECT_EQ(stream_stats.encoders[1].first_frame_id, frames[3].id());
+ EXPECT_EQ(stream_stats.encoders[1].last_frame_id, frames[5].id());
+
+ ASSERT_EQ(stream_stats.decoders.size(), 2lu);
+ EXPECT_EQ(stream_stats.decoders[0].codec_name, codec_names[0]);
+ EXPECT_EQ(stream_stats.decoders[0].first_frame_id, frames[0].id());
+ EXPECT_EQ(stream_stats.decoders[0].last_frame_id, frames[2].id());
+ EXPECT_EQ(stream_stats.decoders[1].codec_name, codec_names[1]);
+ EXPECT_EQ(stream_stats.decoders[1].first_frame_id, frames[3].id());
+ EXPECT_EQ(stream_stats.decoders[1].last_frame_id, frames[5].id());
+}
+
+// Verifies that frames still in flight when Stop() is called are flushed to
+// the comparator with whatever subset of timings they accumulated, and that
+// the global and per-stream counters reflect exactly how far each frame got
+// through the pipeline.
+TEST(DefaultVideoQualityAnalyzerTest,
+ FramesInFlightAreCorrectlySentToTheComparatorAfterStop) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(), options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ // There are 7 different timings inside frame stats: captured, pre_encode,
+ // encoded, received, decode_start, decode_end, rendered. captured is always
+ // set and received is set together with decode_start. So we create 6
+ // different frames, where for each frame next timings will be set
+ // * 1st - all of them set
+ // * 2nd - captured, pre_encode, encoded, received, decode_start, decode_end
+ // * 3rd - captured, pre_encode, encoded, received, decode_start
+ // * 4th - captured, pre_encode, encoded
+ // * 5th - captured, pre_encode
+ // * 6th - captured
+ std::vector<VideoFrame> frames;
+ // Sender side actions
+ for (int i = 0; i < 6; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), 1);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames.push_back(frame);
+ }
+ for (int i = 0; i < 5; ++i) {
+ analyzer.OnFramePreEncode(kSenderPeerName, frames[i]);
+ }
+ for (int i = 0; i < 4; ++i) {
+ analyzer.OnFrameEncoded(
+ kSenderPeerName, frames[i].id(), FakeEncode(frames[i]),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+ }
+
+ // Receiver side actions
+ for (int i = 0; i < 3; ++i) {
+ analyzer.OnFramePreDecode(kReceiverPeerName, frames[i].id(),
+ FakeEncode(frames[i]));
+ }
+ for (int i = 0; i < 2; ++i) {
+ analyzer.OnFrameDecoded(kReceiverPeerName, DeepCopy(frames[i]),
+ VideoQualityAnalyzerInterface::DecoderStats());
+ }
+ for (int i = 0; i < 1; ++i) {
+ analyzer.OnFrameRendered(kReceiverPeerName, DeepCopy(frames[i]));
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.comparisons_done, 6);
+
+ // The last frames in flight size has to reflect the amount of frame in flight
+ // before all of them were sent to the comparison when Stop() was invoked.
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 5)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 6);
+ EXPECT_EQ(frame_counters.pre_encoded, 5);
+ EXPECT_EQ(frame_counters.encoded, 4);
+ EXPECT_EQ(frame_counters.received, 3);
+ EXPECT_EQ(frame_counters.decoded, 2);
+ EXPECT_EQ(frame_counters.rendered, 1);
+
+ EXPECT_EQ(analyzer.GetStats().size(), 1lu);
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kReceiverPeerName));
+ EXPECT_EQ(stream_conters.captured, 6);
+ EXPECT_EQ(stream_conters.pre_encoded, 5);
+ EXPECT_EQ(stream_conters.encoded, 4);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 2);
+ EXPECT_EQ(stream_conters.rendered, 1);
+ }
+}
+
+// Same flush-on-Stop() scenario as above, but with
+// `enable_receive_own_stream` set: both the sender and the remote receiver
+// consume the stream, so comparisons and receive-side counters double while
+// sender-side counters stay the same.
+TEST(
+ DefaultVideoQualityAnalyzerTest,
+ FramesInFlightAreCorrectlySentToTheComparatorAfterStopForSenderAndReceiver) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ options.enable_receive_own_stream = true;
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(), options);
+ analyzer.Start("test_case",
+ std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
+ kAnalyzerMaxThreadsCount);
+
+ // There are 7 different timings inside frame stats: captured, pre_encode,
+ // encoded, received, decode_start, decode_end, rendered. captured is always
+ // set and received is set together with decode_start. So we create 6
+ // different frames, where for each frame next timings will be set
+ // * 1st - all of them set
+ // * 2nd - captured, pre_encode, encoded, received, decode_start, decode_end
+ // * 3rd - captured, pre_encode, encoded, received, decode_start
+ // * 4th - captured, pre_encode, encoded
+ // * 5th - captured, pre_encode
+ // * 6th - captured
+ std::vector<VideoFrame> frames;
+ // Sender side actions
+ for (int i = 0; i < 6; ++i) {
+ VideoFrame frame = NextFrame(frame_generator.get(), 1);
+ frame.set_id(
+ analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
+ frames.push_back(frame);
+ }
+ for (int i = 0; i < 5; ++i) {
+ analyzer.OnFramePreEncode(kSenderPeerName, frames[i]);
+ }
+ for (int i = 0; i < 4; ++i) {
+ analyzer.OnFrameEncoded(
+ kSenderPeerName, frames[i].id(), FakeEncode(frames[i]),
+ VideoQualityAnalyzerInterface::EncoderStats(), false);
+ }
+
+ // Receive side actions: both peers (the sender receiving its own stream and
+ // the remote receiver) consume each frame.
+ for (int i = 0; i < 3; ++i) {
+ analyzer.OnFramePreDecode(kSenderPeerName, frames[i].id(),
+ FakeEncode(frames[i]));
+ analyzer.OnFramePreDecode(kReceiverPeerName, frames[i].id(),
+ FakeEncode(frames[i]));
+ }
+ for (int i = 0; i < 2; ++i) {
+ analyzer.OnFrameDecoded(kSenderPeerName, DeepCopy(frames[i]),
+ VideoQualityAnalyzerInterface::DecoderStats());
+ analyzer.OnFrameDecoded(kReceiverPeerName, DeepCopy(frames[i]),
+ VideoQualityAnalyzerInterface::DecoderStats());
+ }
+ for (int i = 0; i < 1; ++i) {
+ analyzer.OnFrameRendered(kSenderPeerName, DeepCopy(frames[i]));
+ analyzer.OnFrameRendered(kReceiverPeerName, DeepCopy(frames[i]));
+ }
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
+ EXPECT_EQ(analyzer_stats.comparisons_done, 12);
+
+ // The last frames in flight size has to reflect the amount of frame in flight
+ // before all of them were sent to the comparison when Stop() was invoked.
+ std::vector<StatsSample> frames_in_flight_sizes =
+ GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
+ EXPECT_EQ(frames_in_flight_sizes.back().value, 5)
+ << ToString(frames_in_flight_sizes);
+
+ FrameCounters frame_counters = analyzer.GetGlobalCounters();
+ EXPECT_EQ(frame_counters.captured, 6);
+ EXPECT_EQ(frame_counters.pre_encoded, 5);
+ EXPECT_EQ(frame_counters.encoded, 4);
+ EXPECT_EQ(frame_counters.received, 6);
+ EXPECT_EQ(frame_counters.decoded, 4);
+ EXPECT_EQ(frame_counters.rendered, 2);
+
+ EXPECT_EQ(analyzer.GetStats().size(), 2lu);
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kReceiverPeerName));
+ EXPECT_EQ(stream_conters.captured, 6);
+ EXPECT_EQ(stream_conters.pre_encoded, 5);
+ EXPECT_EQ(stream_conters.encoded, 4);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 2);
+ EXPECT_EQ(stream_conters.rendered, 1);
+ }
+ {
+ FrameCounters stream_conters = analyzer.GetPerStreamCounters().at(
+ StatsKey(kStreamLabel, kSenderPeerName));
+ EXPECT_EQ(stream_conters.captured, 6);
+ EXPECT_EQ(stream_conters.pre_encoded, 5);
+ EXPECT_EQ(stream_conters.encoded, 4);
+ EXPECT_EQ(stream_conters.received, 3);
+ EXPECT_EQ(stream_conters.decoded, 2);
+ EXPECT_EQ(stream_conters.rendered, 1);
+ }
+}
+
+// Verifies that GetStreamFrames() returns the mapping from stream label to
+// the ids of frames captured for that stream, in capture order. Frames are
+// only sent, never received: receive events are not required for this
+// bookkeeping.
+TEST(DefaultVideoQualityAnalyzerTest, GetStreamFrames) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+ /*type=*/absl::nullopt,
+ /*num_squares=*/absl::nullopt);
+
+ DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+ DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+ test::GetGlobalMetricsLogger(), options);
+ analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+ kAnalyzerMaxThreadsCount);
+
+ // The order in which peers captured frames and passed them to analyzer.
+ std::vector<std::string> frame_capturers_sequence{
+ "alice", "alice", "bob", "bob", "bob",
+ "bob", "bob", "alice", "alice", "alice",
+ };
+
+ // Expected result, built in parallel with the calls below.
+ std::map<std::string, std::vector<uint16_t>> stream_to_frame_ids;
+ stream_to_frame_ids.emplace("alice_video", std::vector<uint16_t>{});
+ stream_to_frame_ids.emplace("bob_video", std::vector<uint16_t>{});
+
+ std::vector<VideoFrame> frames;
+ for (const std::string& sender : frame_capturers_sequence) {
+ VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+ uint16_t frame_id =
+ analyzer.OnFrameCaptured(sender, sender + "_video", frame);
+ frame.set_id(frame_id);
+ stream_to_frame_ids.find(sender + "_video")->second.push_back(frame_id);
+ frames.push_back(frame);
+ analyzer.OnFramePreEncode(sender, frame);
+ analyzer.OnFrameEncoded(sender, frame.id(), FakeEncode(frame),
+ VideoQualityAnalyzerInterface::EncoderStats(),
+ false);
+ }
+ // We don't need to receive frames for stats to be gathered correctly.
+
+ // Give analyzer some time to process frames on async thread. The computations
+ // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
+ // means we have an issue!
+ SleepMs(100);
+ analyzer.Stop();
+
+ EXPECT_EQ(analyzer.GetStreamFrames(), stream_to_frame_ids);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, ReceiverReceivedFramesWhenSenderRemoved) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  analyzer.UnregisterParticipantInCall("alice");
+
+  analyzer.OnFramePreDecode("bob", frame.id(), FakeEncode(frame));
+  analyzer.OnFrameDecoded("bob", DeepCopy(frame),
+                          VideoQualityAnalyzerInterface::DecoderStats());
+  analyzer.OnFrameRendered("bob", DeepCopy(frame));
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 1);
+  EXPECT_EQ(stream_conters.decoded, 1);
+  EXPECT_EQ(stream_conters.rendered, 1);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     ReceiverReceivedFramesWhenSenderRemovedWithSelfview) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.enable_receive_own_stream = true;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  analyzer.UnregisterParticipantInCall("alice");
+
+  analyzer.OnFramePreDecode("bob", frame.id(), FakeEncode(frame));
+  analyzer.OnFrameDecoded("bob", DeepCopy(frame),
+                          VideoQualityAnalyzerInterface::DecoderStats());
+  analyzer.OnFrameRendered("bob", DeepCopy(frame));
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 1);
+  EXPECT_EQ(stream_conters.decoded, 1);
+  EXPECT_EQ(stream_conters.rendered, 1);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     SenderReceivedFramesWhenReceiverRemovedWithSelfview) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.enable_receive_own_stream = true;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  analyzer.UnregisterParticipantInCall("bob");
+
+  analyzer.OnFramePreDecode("alice", frame.id(), FakeEncode(frame));
+  analyzer.OnFrameDecoded("alice", DeepCopy(frame),
+                          VideoQualityAnalyzerInterface::DecoderStats());
+  analyzer.OnFrameRendered("alice", DeepCopy(frame));
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "alice"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 1);
+  EXPECT_EQ(stream_conters.decoded, 1);
+  EXPECT_EQ(stream_conters.rendered, 1);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     SenderAndReceiverReceivedFramesWhenReceiverRemovedWithSelfview) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.enable_receive_own_stream = true;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  analyzer.OnFramePreDecode("bob", frame.id(), FakeEncode(frame));
+  analyzer.OnFrameDecoded("bob", DeepCopy(frame),
+                          VideoQualityAnalyzerInterface::DecoderStats());
+  analyzer.OnFrameRendered("bob", DeepCopy(frame));
+
+  analyzer.UnregisterParticipantInCall("bob");
+
+  analyzer.OnFramePreDecode("alice", frame.id(), FakeEncode(frame));
+  analyzer.OnFrameDecoded("alice", DeepCopy(frame),
+                          VideoQualityAnalyzerInterface::DecoderStats());
+  analyzer.OnFrameRendered("alice", DeepCopy(frame));
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters alice_alice_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "alice"));
+  EXPECT_EQ(alice_alice_stream_conters.captured, 1);
+  EXPECT_EQ(alice_alice_stream_conters.pre_encoded, 1);
+  EXPECT_EQ(alice_alice_stream_conters.encoded, 1);
+  EXPECT_EQ(alice_alice_stream_conters.received, 1);
+  EXPECT_EQ(alice_alice_stream_conters.decoded, 1);
+  EXPECT_EQ(alice_alice_stream_conters.rendered, 1);
+
+  FrameCounters alice_bob_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(alice_bob_stream_conters.captured, 1);
+  EXPECT_EQ(alice_bob_stream_conters.pre_encoded, 1);
+  EXPECT_EQ(alice_bob_stream_conters.encoded, 1);
+  EXPECT_EQ(alice_bob_stream_conters.received, 1);
+  EXPECT_EQ(alice_bob_stream_conters.decoded, 1);
+  EXPECT_EQ(alice_bob_stream_conters.rendered, 1);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforeCapturing2ndFrame) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/1, *frame_generator);
+  analyzer.UnregisterParticipantInCall("bob");
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {},
+                            /*frames_count=*/1, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 2);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 2);
+  EXPECT_EQ(global_stream_conters.encoded, 2);
+  EXPECT_EQ(global_stream_conters.received, 1);
+  EXPECT_EQ(global_stream_conters.decoded, 1);
+  EXPECT_EQ(global_stream_conters.rendered, 1);
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 2);
+  EXPECT_EQ(stream_conters.pre_encoded, 2);
+  EXPECT_EQ(stream_conters.encoded, 2);
+  EXPECT_EQ(stream_conters.received, 1);
+  EXPECT_EQ(stream_conters.decoded, 1);
+  EXPECT_EQ(stream_conters.rendered, 1);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforePreEncoded) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.UnregisterParticipantInCall("bob");
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 1);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 1);
+  EXPECT_EQ(global_stream_conters.encoded, 1);
+  EXPECT_EQ(global_stream_conters.received, 0);
+  EXPECT_EQ(global_stream_conters.decoded, 0);
+  EXPECT_EQ(global_stream_conters.rendered, 0);
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 0);
+  EXPECT_EQ(stream_conters.decoded, 0);
+  EXPECT_EQ(stream_conters.rendered, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforeEncoded) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  analyzer.UnregisterParticipantInCall("bob");
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 1);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 1);
+  EXPECT_EQ(global_stream_conters.encoded, 1);
+  EXPECT_EQ(global_stream_conters.received, 0);
+  EXPECT_EQ(global_stream_conters.decoded, 0);
+  EXPECT_EQ(global_stream_conters.rendered, 0);
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 0);
+  EXPECT_EQ(stream_conters.decoded, 0);
+  EXPECT_EQ(stream_conters.rendered, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     ReceiverRemovedBetweenSimulcastLayersEncoded) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  // The 1st simulcast layer is encoded.
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+  analyzer.UnregisterParticipantInCall("bob");
+  // The 2nd simulcast layer is encoded.
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 1);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 1);
+  EXPECT_EQ(global_stream_conters.encoded, 1);
+  EXPECT_EQ(global_stream_conters.received, 0);
+  EXPECT_EQ(global_stream_conters.decoded, 0);
+  EXPECT_EQ(global_stream_conters.rendered, 0);
+  FrameCounters stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(stream_conters.captured, 1);
+  EXPECT_EQ(stream_conters.pre_encoded, 1);
+  EXPECT_EQ(stream_conters.encoded, 1);
+  EXPECT_EQ(stream_conters.received, 0);
+  EXPECT_EQ(stream_conters.decoded, 0);
+  EXPECT_EQ(stream_conters.rendered, 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, UnregisterOneAndRegisterAnother) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case",
+                 std::vector<std::string>{"alice", "bob", "charlie"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+                            {"bob", "charlie"},
+                            /*frames_count=*/2, *frame_generator);
+  analyzer.UnregisterParticipantInCall("bob");
+  analyzer.RegisterParticipantInCall("david");
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+                            {"charlie", "david"},
+                            /*frames_count=*/4, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 6);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 6);
+  EXPECT_EQ(global_stream_conters.encoded, 6);
+  EXPECT_EQ(global_stream_conters.received, 12);
+  EXPECT_EQ(global_stream_conters.decoded, 12);
+  EXPECT_EQ(global_stream_conters.rendered, 12);
+  FrameCounters alice_bob_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(alice_bob_stream_conters.captured, 6);
+  EXPECT_EQ(alice_bob_stream_conters.pre_encoded, 6);
+  EXPECT_EQ(alice_bob_stream_conters.encoded, 6);
+  EXPECT_EQ(alice_bob_stream_conters.received, 2);
+  EXPECT_EQ(alice_bob_stream_conters.decoded, 2);
+  EXPECT_EQ(alice_bob_stream_conters.rendered, 2);
+  FrameCounters alice_charlie_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "charlie"));
+  EXPECT_EQ(alice_charlie_stream_conters.captured, 6);
+  EXPECT_EQ(alice_charlie_stream_conters.pre_encoded, 6);
+  EXPECT_EQ(alice_charlie_stream_conters.encoded, 6);
+  EXPECT_EQ(alice_charlie_stream_conters.received, 6);
+  EXPECT_EQ(alice_charlie_stream_conters.decoded, 6);
+  EXPECT_EQ(alice_charlie_stream_conters.rendered, 6);
+  FrameCounters alice_david_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "david"));
+  EXPECT_EQ(alice_david_stream_conters.captured, 6);
+  EXPECT_EQ(alice_david_stream_conters.pre_encoded, 6);
+  EXPECT_EQ(alice_david_stream_conters.encoded, 6);
+  EXPECT_EQ(alice_david_stream_conters.received, 4);
+  EXPECT_EQ(alice_david_stream_conters.decoded, 4);
+  EXPECT_EQ(alice_david_stream_conters.rendered, 4);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     UnregisterOneAndRegisterAnotherRegisterBack) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case",
+                 std::vector<std::string>{"alice", "bob", "charlie"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+                            {"bob", "charlie"},
+                            /*frames_count=*/2, *frame_generator);
+  analyzer.UnregisterParticipantInCall("bob");
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"charlie"},
+                            /*frames_count=*/4, *frame_generator);
+  analyzer.RegisterParticipantInCall("bob");
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
+                            {"bob", "charlie"},
+                            /*frames_count=*/6, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  FrameCounters global_stream_conters = analyzer.GetGlobalCounters();
+  EXPECT_EQ(global_stream_conters.captured, 12);
+  EXPECT_EQ(global_stream_conters.pre_encoded, 12);
+  EXPECT_EQ(global_stream_conters.encoded, 12);
+  EXPECT_EQ(global_stream_conters.received, 20);
+  EXPECT_EQ(global_stream_conters.decoded, 20);
+  EXPECT_EQ(global_stream_conters.rendered, 20);
+  FrameCounters alice_bob_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "bob"));
+  EXPECT_EQ(alice_bob_stream_conters.captured, 12);
+  EXPECT_EQ(alice_bob_stream_conters.pre_encoded, 12);
+  EXPECT_EQ(alice_bob_stream_conters.encoded, 12);
+  EXPECT_EQ(alice_bob_stream_conters.received, 8);
+  EXPECT_EQ(alice_bob_stream_conters.decoded, 8);
+  EXPECT_EQ(alice_bob_stream_conters.rendered, 8);
+  FrameCounters alice_charlie_stream_conters =
+      analyzer.GetPerStreamCounters().at(StatsKey("alice_video", "charlie"));
+  EXPECT_EQ(alice_charlie_stream_conters.captured, 12);
+  EXPECT_EQ(alice_charlie_stream_conters.pre_encoded, 12);
+  EXPECT_EQ(alice_charlie_stream_conters.encoded, 12);
+  EXPECT_EQ(alice_charlie_stream_conters.received, 12);
+  EXPECT_EQ(alice_charlie_stream_conters.decoded, 12);
+  EXPECT_EQ(alice_charlie_stream_conters.rendered, 12);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     FramesInFlightAreAccountedForUnregisterPeers) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  // Add one frame in flight whose encode time is >= 10ms.
+  VideoFrame frame = NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id = analyzer.OnFrameCaptured("alice", "alice_video", frame);
+  frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", frame);
+  SleepMs(10);
+  analyzer.OnFrameEncoded("alice", frame.id(), FakeEncode(frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+
+  analyzer.UnregisterParticipantInCall("bob");
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  StreamStats stats = analyzer.GetStats().at(StatsKey("alice_video", "bob"));
+  ASSERT_EQ(stats.encode_time_ms.NumSamples(), 1);
+  EXPECT_GE(stats.encode_time_ms.GetAverage(), 10);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsAreReportedWhenRequested) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.report_infra_metrics = true;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/1, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  AnalyzerStats stats = analyzer.GetAnalyzerStats();
+  EXPECT_EQ(stats.on_frame_captured_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_frame_pre_encode_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_frame_encoded_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_frame_pre_decode_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_frame_decoded_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_frame_rendered_processing_time_ms.NumSamples(), 1);
+  EXPECT_EQ(stats.on_decoder_error_processing_time_ms.NumSamples(), 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsNotCollectedByDefault) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.report_infra_metrics = false;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/1, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  AnalyzerStats stats = analyzer.GetAnalyzerStats();
+  EXPECT_EQ(stats.on_frame_captured_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_frame_pre_encode_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_frame_encoded_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_frame_pre_decode_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_frame_decoded_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_frame_rendered_processing_time_ms.NumSamples(), 0);
+  EXPECT_EQ(stats.on_decoder_error_processing_time_ms.NumSamples(), 0);
+}
+
+TEST(DefaultVideoQualityAnalyzerTest,
+     FrameDroppedByDecoderIsAccountedCorrectly) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  options.report_infra_metrics = false;
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  VideoFrame to_be_dropped_frame =
+      NextFrame(frame_generator.get(), /*timestamp_us=*/1);
+  uint16_t frame_id =
+      analyzer.OnFrameCaptured("alice", "alice_video", to_be_dropped_frame);
+  to_be_dropped_frame.set_id(frame_id);
+  analyzer.OnFramePreEncode("alice", to_be_dropped_frame);
+  analyzer.OnFrameEncoded("alice", to_be_dropped_frame.id(),
+                          FakeEncode(to_be_dropped_frame),
+                          VideoQualityAnalyzerInterface::EncoderStats(), false);
+  VideoFrame received_to_be_dropped_frame = DeepCopy(to_be_dropped_frame);
+  analyzer.OnFramePreDecode("bob", received_to_be_dropped_frame.id(),
+                            FakeEncode(received_to_be_dropped_frame));
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/1, *frame_generator);
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into 100ms it means we have an issue!
+  SleepMs(100);
+  analyzer.Stop();
+
+  StreamStats stats = analyzer.GetStats().at(StatsKey("alice_video", "bob"));
+  ASSERT_EQ(stats.dropped_by_phase[FrameDropPhase::kByDecoder], 1);
+}
+
+class DefaultVideoQualityAnalyzerTimeBetweenFreezesTest
+ : public TestWithParam<bool> {};
+
+TEST_P(DefaultVideoQualityAnalyzerTimeBetweenFreezesTest,
+       TimeBetweenFreezesIsEqualToStreamDurationWhenThereAreNoFeeezes) {
+  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
+                                       /*type=*/absl::nullopt,
+                                       /*num_squares=*/absl::nullopt);
+
+  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
+  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
+                                       test::GetGlobalMetricsLogger(), options);
+  analyzer.Start("test_case", std::vector<std::string>{"alice", "bob"},
+                 kAnalyzerMaxThreadsCount);
+
+  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"bob"},
+                            /*frames_count=*/5, *frame_generator,
+                            /*interframe_delay_ms=*/50);
+  if (GetParam()) {
+    analyzer.UnregisterParticipantInCall("bob");
+  }
+
+  // Give the analyzer some time to process frames on the async thread. The
+  // computations have to be fast (heavy metrics are disabled!), so if they do
+  // not fit into the 50ms sleep below it means we have an issue!
+  SleepMs(50);
+  analyzer.Stop();
+
+  StreamStats stats = analyzer.GetStats().at(StatsKey("alice_video", "bob"));
+  ASSERT_EQ(stats.time_between_freezes_ms.NumSamples(), 1);
+  EXPECT_GE(stats.time_between_freezes_ms.GetAverage(), 200);
+}
+
+INSTANTIATE_TEST_SUITE_P(WithRegisteredAndUnregisteredPeerAtTheEndOfTheCall,
+ DefaultVideoQualityAnalyzerTimeBetweenFreezesTest,
+ ValuesIn({true, false}));
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h
new file mode 100644
index 0000000000..384e901462
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_ENCODED_IMAGE_DATA_INJECTOR_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_ENCODED_IMAGE_DATA_INJECTOR_H_
+
+#include <cstdint>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/video/encoded_image.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Injects a frame id into an EncodedImage on the encoder side.
+class EncodedImageDataInjector {
+ public:
+  virtual ~EncodedImageDataInjector() = default;
+
+  // Returns the encoded image with the specified `id` and `discard` flag
+  // injected into its payload. The `discard` flag tells the analyzing decoder
+  // whether it should discard this encoded image because it belongs to an
+  // unnecessary simulcast stream or spatial layer.
+  virtual EncodedImage InjectData(uint16_t id,
+                                  bool discard,
+                                  const EncodedImage& source) = 0;
+};
+
+struct EncodedImageExtractionResult {
+  absl::optional<uint16_t> id;
+  EncodedImage image;
+  // True if the encoded image should be discarded. It is used to filter out
+  // unnecessary spatial layers and simulcast streams.
+  bool discard;
+};
+
+// Extracts the frame id from an EncodedImage on the decoder side.
+class EncodedImageDataExtractor {
+ public:
+  virtual ~EncodedImageDataExtractor() = default;
+
+  // Invoked by the framework before any image comes to the extractor.
+  // `expected_receivers_count` is the expected number of receivers for each
+  // encoded image.
+  virtual void Start(int expected_receivers_count) = 0;
+
+  // Invoked by the framework when one more receiver of frames has to be
+  // added. Will be invoked before that receiver starts receiving data.
+  virtual void AddParticipantInCall() = 0;
+
+  // Invoked by the framework when a receiver of frames has to be removed.
+  // Will be invoked after that receiver has stopped receiving data.
+  virtual void RemoveParticipantInCall() = 0;
+
+  // Returns the encoded image id extracted from the payload together with the
+  // encoded image carrying its original payload. For concatenated spatial
+  // layers it should be the same id.
+  virtual EncodedImageExtractionResult ExtractData(
+      const EncodedImage& source) = 0;
+};
+
+class EncodedImageDataPropagator : public EncodedImageDataInjector,
+ public EncodedImageDataExtractor {
+ public:
+ ~EncodedImageDataPropagator() override = default;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_ENCODED_IMAGE_DATA_INJECTOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
new file mode 100644
index 0000000000..da9c53beb9
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/example_video_quality_analyzer.h"
+
+#include "api/array_view.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+ExampleVideoQualityAnalyzer::ExampleVideoQualityAnalyzer() = default;
+ExampleVideoQualityAnalyzer::~ExampleVideoQualityAnalyzer() = default;
+
+void ExampleVideoQualityAnalyzer::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {}
+
+uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured(
+ absl::string_view peer_name,
+ const std::string& stream_label,
+ const webrtc::VideoFrame& frame) {
+ MutexLock lock(&lock_);
+ uint16_t frame_id = next_frame_id_++;
+ if (frame_id == VideoFrame::kNotSetId) {
+ frame_id = next_frame_id_++;
+ }
+ auto it = frames_in_flight_.find(frame_id);
+ if (it == frames_in_flight_.end()) {
+ frames_in_flight_.insert(frame_id);
+ frames_to_stream_label_.insert({frame_id, stream_label});
+ } else {
+ RTC_LOG(LS_WARNING) << "Meet new frame with the same id: " << frame_id
+ << ". Assumes old one as dropped";
+ // We needn't insert frame to frames_in_flight_, because it is already
+ // there.
+ ++frames_dropped_;
+ auto stream_it = frames_to_stream_label_.find(frame_id);
+ RTC_CHECK(stream_it != frames_to_stream_label_.end());
+ stream_it->second = stream_label;
+ }
+ ++frames_captured_;
+ return frame_id;
+}
+
+void ExampleVideoQualityAnalyzer::OnFramePreEncode(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame) {
+ MutexLock lock(&lock_);
+ ++frames_pre_encoded_;
+}
+
+void ExampleVideoQualityAnalyzer::OnFrameEncoded(
+ absl::string_view peer_name,
+ uint16_t frame_id,
+ const webrtc::EncodedImage& encoded_image,
+ const EncoderStats& stats,
+ bool discarded) {
+ MutexLock lock(&lock_);
+ ++frames_encoded_;
+}
+
+void ExampleVideoQualityAnalyzer::OnFrameDropped(
+ absl::string_view peer_name,
+ webrtc::EncodedImageCallback::DropReason reason) {
+ RTC_LOG(LS_INFO) << "Frame dropped by encoder";
+ MutexLock lock(&lock_);
+ ++frames_dropped_;
+}
+
+void ExampleVideoQualityAnalyzer::OnFramePreDecode(
+ absl::string_view peer_name,
+ uint16_t frame_id,
+ const webrtc::EncodedImage& encoded_image) {
+ MutexLock lock(&lock_);
+ ++frames_received_;
+}
+
+void ExampleVideoQualityAnalyzer::OnFrameDecoded(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame,
+ const DecoderStats& stats) {
+ MutexLock lock(&lock_);
+ ++frames_decoded_;
+}
+
+void ExampleVideoQualityAnalyzer::OnFrameRendered(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame) {
+ MutexLock lock(&lock_);
+ frames_in_flight_.erase(frame.id());
+ ++frames_rendered_;
+}
+
+void ExampleVideoQualityAnalyzer::OnEncoderError(
+ absl::string_view peer_name,
+ const webrtc::VideoFrame& frame,
+ int32_t error_code) {
+ RTC_LOG(LS_ERROR) << "Failed to encode frame " << frame.id()
+ << ". Code: " << error_code;
+}
+
+void ExampleVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code,
+ const DecoderStats& stats) {
+ RTC_LOG(LS_ERROR) << "Failed to decode frame " << frame_id
+ << ". Code: " << error_code;
+}
+
+void ExampleVideoQualityAnalyzer::Stop() {
+ MutexLock lock(&lock_);
+ RTC_LOG(LS_INFO) << "There are " << frames_in_flight_.size()
+ << " frames in flight, assuming all of them are dropped";
+ frames_dropped_ += frames_in_flight_.size();
+}
+
+std::string ExampleVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
+ MutexLock lock(&lock_);
+ auto it = frames_to_stream_label_.find(frame_id);
+ RTC_DCHECK(it != frames_to_stream_label_.end())
+ << "Unknown frame_id=" << frame_id;
+ return it->second;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_captured() const {
+ MutexLock lock(&lock_);
+ return frames_captured_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_pre_encoded() const {
+ MutexLock lock(&lock_);
+ return frames_pre_encoded_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_encoded() const {
+ MutexLock lock(&lock_);
+ return frames_encoded_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_received() const {
+ MutexLock lock(&lock_);
+ return frames_received_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_decoded() const {
+ MutexLock lock(&lock_);
+ return frames_decoded_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_rendered() const {
+ MutexLock lock(&lock_);
+ return frames_rendered_;
+}
+
+uint64_t ExampleVideoQualityAnalyzer::frames_dropped() const {
+ MutexLock lock(&lock_);
+ return frames_dropped_;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
new file mode 100644
index 0000000000..af4868a961
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_EXAMPLE_VIDEO_QUALITY_ANALYZER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_EXAMPLE_VIDEO_QUALITY_ANALYZER_H_
+
+#include <atomic>
+#include <map>
+#include <set>
+#include <string>
+
+#include "api/array_view.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+
+// This class is an example implementation of
+// webrtc::VideoQualityAnalyzerInterface and calculates simple metrics
+// just to demonstration purposes. Assumed to be used in the single process
+// test cases, where both peers are in the same process.
+class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
+ public:
+ ExampleVideoQualityAnalyzer();
+ ~ExampleVideoQualityAnalyzer() override;
+
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) override;
+ uint16_t OnFrameCaptured(absl::string_view peer_name,
+ const std::string& stream_label,
+ const VideoFrame& frame) override;
+ void OnFramePreEncode(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnFrameEncoded(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& encoded_image,
+ const EncoderStats& stats,
+ bool discarded) override;
+ void OnFrameDropped(absl::string_view peer_name,
+ EncodedImageCallback::DropReason reason) override;
+ void OnFramePreDecode(absl::string_view peer_name,
+ uint16_t frame_id,
+ const EncodedImage& encoded_image) override;
+ void OnFrameDecoded(absl::string_view peer_name,
+ const VideoFrame& frame,
+ const DecoderStats& stats) override;
+ void OnFrameRendered(absl::string_view peer_name,
+ const VideoFrame& frame) override;
+ void OnEncoderError(absl::string_view peer_name,
+ const VideoFrame& frame,
+ int32_t error_code) override;
+ void OnDecoderError(absl::string_view peer_name,
+ uint16_t frame_id,
+ int32_t error_code,
+ const DecoderStats& stats) override;
+ void Stop() override;
+ std::string GetStreamLabel(uint16_t frame_id) override;
+
+ uint64_t frames_captured() const;
+ uint64_t frames_pre_encoded() const;
+ uint64_t frames_encoded() const;
+ uint64_t frames_received() const;
+ uint64_t frames_decoded() const;
+ uint64_t frames_rendered() const;
+ uint64_t frames_dropped() const;
+
+ private:
+ // When peer A captures a frame it will come into the analyzer's
+ // OnFrameCaptured and will be stored in frames_in_flight_. It will be removed
+ // from there when it is received by peer B, so we need to guard it with a
+ // lock. Also, because the analyzer serves all video streams, it can be called
+ // from different threads inside one peer.
+ mutable Mutex lock_;
+ // Stores frame ids that are currently going from one peer to another. We
+ // need to keep them to correctly determine dropped frames and also correctly
+ // process frame id overlap.
+ std::set<uint16_t> frames_in_flight_ RTC_GUARDED_BY(lock_);
+ std::map<uint16_t, std::string> frames_to_stream_label_ RTC_GUARDED_BY(lock_);
+ uint16_t next_frame_id_ RTC_GUARDED_BY(lock_) = 1;
+ uint64_t frames_captured_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_pre_encoded_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_encoded_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_received_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_decoded_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_rendered_ RTC_GUARDED_BY(lock_) = 0;
+ uint64_t frames_dropped_ RTC_GUARDED_BY(lock_) = 0;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_EXAMPLE_VIDEO_QUALITY_ANALYZER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue.h
new file mode 100644
index 0000000000..39d26b42bc
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_MULTI_READER_QUEUE_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_MULTI_READER_QUEUE_H_
+
+#include <deque>
+#include <memory>
+#include <set>
+#include <unordered_map>
+
+#include "absl/types/optional.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Represents the queue which can be read by multiple readers. Each reader reads
+// from its own queue head. When an element is added it will become visible for
+// all readers. When an element will be removed by all the readers, the element
+// will be removed from the queue.
+template <typename T>
+class MultiReaderQueue {
+ public:
+ // Creates queue with exactly `readers_count` readers named from 0 to
+ // `readers_count - 1`.
+ explicit MultiReaderQueue(size_t readers_count) {
+ for (size_t i = 0; i < readers_count; ++i) {
+ heads_[i] = 0;
+ }
+ }
+ // Creates queue with specified readers.
+ explicit MultiReaderQueue(std::set<size_t> readers) {
+ for (size_t reader : readers) {
+ heads_[reader] = 0;
+ }
+ }
+
+ // Adds a new `reader`, initializing its reading position (the reader's head)
+ // equal to the one of `reader_to_copy`.
+ // Complexity O(MultiReaderQueue::size(reader_to_copy)).
+ void AddReader(size_t reader, size_t reader_to_copy) {
+ size_t pos = GetHeadPositionOrDie(reader_to_copy);
+
+ auto it = heads_.find(reader);
+ RTC_CHECK(it == heads_.end())
+ << "Reader " << reader << " is already in the queue";
+ heads_[reader] = heads_[reader_to_copy];
+ for (size_t i = pos; i < queue_.size(); ++i) {
+ in_queues_[i]++;
+ }
+ }
+
+ // Adds a new `reader`, initializing its reading position equal to first
+ // element in the queue.
+ // Complexity O(MultiReaderQueue::size()).
+ void AddReader(size_t reader) {
+ auto it = heads_.find(reader);
+ RTC_CHECK(it == heads_.end())
+ << "Reader " << reader << " is already in the queue";
+ heads_[reader] = removed_elements_count_;
+ for (size_t i = 0; i < queue_.size(); ++i) {
+ in_queues_[i]++;
+ }
+ }
+
+ // Removes specified `reader` from the queue.
+ // Complexity O(MultiReaderQueue::size(reader)).
+ void RemoveReader(size_t reader) {
+ size_t pos = GetHeadPositionOrDie(reader);
+ for (size_t i = pos; i < queue_.size(); ++i) {
+ in_queues_[i]--;
+ }
+ while (!in_queues_.empty() && in_queues_[0] == 0) {
+ PopFront();
+ }
+ heads_.erase(reader);
+ }
+
+ // Add value to the end of the queue. Complexity O(1).
+ void PushBack(T value) {
+ queue_.push_back(value);
+ in_queues_.push_back(heads_.size());
+ }
+
+ // Extract element from specified head. Complexity O(1).
+ absl::optional<T> PopFront(size_t reader) {
+ size_t pos = GetHeadPositionOrDie(reader);
+ if (pos >= queue_.size()) {
+ return absl::nullopt;
+ }
+
+ T out = queue_[pos];
+
+ in_queues_[pos]--;
+ heads_[reader]++;
+
+ if (in_queues_[pos] == 0) {
+ RTC_DCHECK_EQ(pos, 0);
+ PopFront();
+ }
+ return out;
+ }
+
+ // Returns element at specified head. Complexity O(1).
+ absl::optional<T> Front(size_t reader) const {
+ size_t pos = GetHeadPositionOrDie(reader);
+ if (pos >= queue_.size()) {
+ return absl::nullopt;
+ }
+ return queue_[pos];
+ }
+
+ // Returns true if for specified head there are no more elements in the queue
+ // or false otherwise. Complexity O(1).
+ bool IsEmpty(size_t reader) const {
+ size_t pos = GetHeadPositionOrDie(reader);
+ return pos >= queue_.size();
+ }
+
+ // Returns size of the longest queue between all readers.
+ // Complexity O(1).
+ size_t size() const { return queue_.size(); }
+
+ // Returns size of the specified queue. Complexity O(1).
+ size_t size(size_t reader) const {
+ size_t pos = GetHeadPositionOrDie(reader);
+ return queue_.size() - pos;
+ }
+
+ // Complexity O(1).
+ size_t readers_count() const { return heads_.size(); }
+
+ private:
+ size_t GetHeadPositionOrDie(size_t reader) const {
+ auto it = heads_.find(reader);
+ RTC_CHECK(it != heads_.end()) << "No queue for reader " << reader;
+ return it->second - removed_elements_count_;
+ }
+
+ void PopFront() {
+ RTC_DCHECK(!queue_.empty());
+ RTC_DCHECK_EQ(in_queues_[0], 0);
+ queue_.pop_front();
+ in_queues_.pop_front();
+ removed_elements_count_++;
+ }
+
+ // Number of elements that were removed from the queue. It is subtracted
+ // from each head to compute the right index inside `queue_`.
+ size_t removed_elements_count_ = 0;
+ std::deque<T> queue_;
+ // In how many queues the element at index `i` is. An element can be removed
+ // from the front if and only if it is in 0 queues.
+ std::deque<size_t> in_queues_;
+ // Map from the reader to the head position in the queue.
+ std::unordered_map<size_t, size_t> heads_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_MULTI_READER_QUEUE_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc
new file mode 100644
index 0000000000..ea6aa0a416
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/multi_reader_queue.h"
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+TEST(MultiReaderQueueTest, EmptyQueueEmptyForAllHeads) {
+ MultiReaderQueue<int> queue = MultiReaderQueue<int>(/*readers_count=*/10);
+ EXPECT_EQ(queue.size(), 0lu);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_TRUE(queue.IsEmpty(/*reader=*/i));
+ EXPECT_EQ(queue.size(/*reader=*/i), 0lu);
+ EXPECT_FALSE(queue.PopFront(/*reader=*/i).has_value());
+ EXPECT_FALSE(queue.Front(/*reader=*/i).has_value());
+ }
+}
+
+TEST(MultiReaderQueueTest, SizeIsEqualForAllHeadsAfterAddOnly) {
+ MultiReaderQueue<int> queue = MultiReaderQueue<int>(/*readers_count=*/10);
+ queue.PushBack(1);
+ queue.PushBack(2);
+ queue.PushBack(3);
+ EXPECT_EQ(queue.size(), 3lu);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_FALSE(queue.IsEmpty(/*reader=*/i));
+ EXPECT_EQ(queue.size(/*reader=*/i), 3lu);
+ }
+}
+
+TEST(MultiReaderQueueTest, SizeIsCorrectAfterRemoveFromOnlyOneHead) {
+ MultiReaderQueue<int> queue = MultiReaderQueue<int>(/*readers_count=*/10);
+ for (int i = 0; i < 5; ++i) {
+ queue.PushBack(i);
+ }
+ EXPECT_EQ(queue.size(), 5lu);
+ // Removing elements from queue #0
+ for (int i = 0; i < 5; ++i) {
+ EXPECT_EQ(queue.size(/*reader=*/0), static_cast<size_t>(5 - i));
+ EXPECT_EQ(queue.PopFront(/*reader=*/0), absl::optional<int>(i));
+ for (int j = 1; j < 10; ++j) {
+ EXPECT_EQ(queue.size(/*reader=*/j), 5lu);
+ }
+ }
+ EXPECT_EQ(queue.size(/*reader=*/0), 0lu);
+ EXPECT_TRUE(queue.IsEmpty(/*reader=*/0));
+}
+
+TEST(MultiReaderQueueTest, SingleHeadOneAddOneRemove) {
+ MultiReaderQueue<int> queue = MultiReaderQueue<int>(/*readers_count=*/1);
+ queue.PushBack(1);
+ EXPECT_EQ(queue.size(), 1lu);
+ EXPECT_TRUE(queue.Front(/*reader=*/0).has_value());
+ EXPECT_EQ(queue.Front(/*reader=*/0).value(), 1);
+ absl::optional<int> value = queue.PopFront(/*reader=*/0);
+ EXPECT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), 1);
+ EXPECT_EQ(queue.size(), 0lu);
+ EXPECT_TRUE(queue.IsEmpty(/*reader=*/0));
+}
+
+TEST(MultiReaderQueueTest, SingleHead) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/1);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ EXPECT_EQ(queue.Front(/*reader=*/0), absl::optional<size_t>(i));
+ EXPECT_EQ(queue.PopFront(/*reader=*/0), absl::optional<size_t>(i));
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ }
+}
+
+TEST(MultiReaderQueueTest, ThreeHeadsAddAllRemoveAllPerHead) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(/*reader=*/0);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(/*reader=*/1);
+ EXPECT_EQ(queue.size(), 10lu);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(/*reader=*/2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiReaderQueueTest, ThreeHeadsAddAllRemoveAll) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/3);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ EXPECT_EQ(queue.size(), i + 1);
+ }
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value1 = queue.PopFront(/*reader=*/0);
+ absl::optional<size_t> value2 = queue.PopFront(/*reader=*/1);
+ absl::optional<size_t> value3 = queue.PopFront(/*reader=*/2);
+ EXPECT_EQ(queue.size(), 10 - i - 1);
+ ASSERT_TRUE(value1.has_value());
+ ASSERT_TRUE(value2.has_value());
+ ASSERT_TRUE(value3.has_value());
+ EXPECT_EQ(value1.value(), i);
+ EXPECT_EQ(value2.value(), i);
+ EXPECT_EQ(value3.value(), i);
+ }
+}
+
+TEST(MultiReaderQueueTest, AddReaderSeeElementsOnlyFromReaderToCopy) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/2);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ }
+ for (size_t i = 0; i < 5; ++i) {
+ queue.PopFront(0);
+ }
+
+ queue.AddReader(/*reader=*/2, /*reader_to_copy=*/0);
+
+ EXPECT_EQ(queue.readers_count(), 3lu);
+ for (size_t i = 5; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(/*reader=*/2);
+ EXPECT_EQ(queue.size(/*reader=*/2), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiReaderQueueTest, AddReaderWithoutReaderToCopySeeFullQueue) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/2);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ }
+ for (size_t i = 0; i < 5; ++i) {
+ queue.PopFront(/*reader=*/0);
+ }
+
+ queue.AddReader(/*reader=*/2);
+
+ EXPECT_EQ(queue.readers_count(), 3lu);
+ for (size_t i = 0; i < 10; ++i) {
+ absl::optional<size_t> value = queue.PopFront(/*reader=*/2);
+ EXPECT_EQ(queue.size(/*reader=*/2), 10 - i - 1);
+ ASSERT_TRUE(value.has_value());
+ EXPECT_EQ(value.value(), i);
+ }
+}
+
+TEST(MultiReaderQueueTest, RemoveReaderWontChangeOthers) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/2);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ }
+ EXPECT_EQ(queue.size(/*reader=*/1), 10lu);
+
+ queue.RemoveReader(0);
+
+ EXPECT_EQ(queue.readers_count(), 1lu);
+ EXPECT_EQ(queue.size(/*reader=*/1), 10lu);
+}
+
+TEST(MultiReaderQueueTest, RemoveLastReaderMakesQueueEmpty) {
+ MultiReaderQueue<size_t> queue =
+ MultiReaderQueue<size_t>(/*readers_count=*/1);
+ for (size_t i = 0; i < 10; ++i) {
+ queue.PushBack(i);
+ }
+ EXPECT_EQ(queue.size(), 10lu);
+
+ queue.RemoveReader(0);
+
+ EXPECT_EQ(queue.size(), 0lu);
+ EXPECT_EQ(queue.readers_count(), 0lu);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.cc
new file mode 100644
index 0000000000..3ccab620f8
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/names_collection.h"
+
+#include <set>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+NamesCollection::NamesCollection(rtc::ArrayView<const std::string> names) {
+ names_ = std::vector<std::string>(names.begin(), names.end());
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ removed_.emplace_back(false);
+ }
+ size_ = names_.size();
+}
+
+bool NamesCollection::HasName(absl::string_view name) const {
+ auto it = index_.find(name);
+ if (it == index_.end()) {
+ return false;
+ }
+ return !removed_[it->second];
+}
+
+size_t NamesCollection::AddIfAbsent(absl::string_view name) {
+ auto it = index_.find(name);
+ if (it != index_.end()) {
+ // Name was registered in the collection before: we need to restore it.
+ size_t index = it->second;
+ if (removed_[index]) {
+ removed_[index] = false;
+ size_++;
+ }
+ return index;
+ }
+ size_t out = names_.size();
+ size_t old_capacity = names_.capacity();
+ names_.emplace_back(name);
+ removed_.emplace_back(false);
+ size_++;
+ size_t new_capacity = names_.capacity();
+
+ if (old_capacity == new_capacity) {
+ index_.emplace(names_[out], out);
+ } else {
+ // Reallocation happened in the vector, so we need to rebuild `index_` to
+ // fix absl::string_view internal references.
+ index_.clear();
+ for (size_t i = 0; i < names_.size(); ++i) {
+ index_.emplace(names_[i], i);
+ }
+ }
+ return out;
+}
+
+absl::optional<size_t> NamesCollection::RemoveIfPresent(
+ absl::string_view name) {
+ auto it = index_.find(name);
+ if (it == index_.end()) {
+ return absl::nullopt;
+ }
+ size_t index = it->second;
+ if (removed_[index]) {
+ return absl::nullopt;
+ }
+ removed_[index] = true;
+ size_--;
+ return index;
+}
+
+std::set<size_t> NamesCollection::GetPresentIndexes() const {
+ std::set<size_t> out;
+ for (size_t i = 0; i < removed_.size(); ++i) {
+ if (!removed_[i]) {
+ out.insert(i);
+ }
+ }
+ return out;
+}
+
+std::set<size_t> NamesCollection::GetAllIndexes() const {
+ std::set<size_t> out;
+ for (size_t i = 0; i < names_.size(); ++i) {
+ out.insert(i);
+ }
+ return out;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.h
new file mode 100644
index 0000000000..f9a13a2a11
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_NAMES_COLLECTION_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_NAMES_COLLECTION_H_
+
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+
+namespace webrtc {
+
+// Contains mapping between string names and unique size_t values (indexes).
+// Once the name is added to the collection it is guaranteed:
+ // 1. The name will keep the same index until the collection is destructed
+// 2. Adding, removing and re-adding name won't change its index
+//
+// The name is considered in the collection if it was added and wasn't removed.
+ // Adding a name that is already in the collection won't change it, and
+ // neither will removing a name that has already been removed.
+//
+ // The collection will return a name's index and the name for an index
+ // regardless of whether the name was removed or not. Once a name has been
+ // added to the collection an index is allocated for it. To check whether a
+ // name is currently in the collection, call the `HasName` function.
+class NamesCollection {
+ public:
+ NamesCollection() = default;
+
+ explicit NamesCollection(rtc::ArrayView<const std::string> names);
+
+ // Returns the number of names currently present in the collection.
+ size_t size() const { return size_; }
+
+ // Returns the number of all names known to this collection.
+ size_t GetKnownSize() const { return names_.size(); }
+
+ // Returns index of the `name` which was known to the collection. Crashes
+ // if `name` was never registered in the collection.
+ size_t index(absl::string_view name) const { return index_.at(name); }
+
+ // Returns name which was known to the collection for the specified `index`.
+ // Crashes if there was no any name registered in the collection for such
+ // `index`.
+ const std::string& name(size_t index) const { return names_.at(index); }
+
+ // Returns whether `name` is currently present in this collection.
+ bool HasName(absl::string_view name) const;
+
+ // Adds the specified `name` to the collection if it isn't present.
+ // Returns index which corresponds to specified `name`.
+ size_t AddIfAbsent(absl::string_view name);
+
+ // Removes the specified `name` from the collection if it is present.
+ //
+ // After name was removed, collection size will be decreased, but `name` index
+ // will be preserved. Collection will return false for `HasName(name)`, but
+ // will continue to return previously known index for `index(name)` and return
+ // `name` for `name(index(name))`.
+ //
+ // Returns the index of the removed value or absl::nullopt if no such `name`
+ // registered in the collection.
+ absl::optional<size_t> RemoveIfPresent(absl::string_view name);
+
+ // Returns a set of indexes for all currently present names in the
+ // collection.
+ std::set<size_t> GetPresentIndexes() const;
+
+ // Returns a set of all indexes known to the collection including indexes for
+ // names that were removed.
+ std::set<size_t> GetAllIndexes() const;
+
+ private:
+ std::vector<std::string> names_;
+ std::vector<bool> removed_;
+ std::map<absl::string_view, size_t> index_;
+ size_t size_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_NAMES_COLLECTION_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection_test.cc
new file mode 100644
index 0000000000..6c52f96975
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/names_collection_test.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/names_collection.h"
+
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Ne;
+
+TEST(NamesCollectionTest, NamesFromCtorHasUniqueIndexes) {
+ NamesCollection collection(std::vector<std::string>{"alice", "bob"});
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(2)));
+ EXPECT_TRUE(collection.HasName("alice"));
+ EXPECT_THAT(collection.name(collection.index("alice")), Eq("alice"));
+
+ EXPECT_TRUE(collection.HasName("bob"));
+ EXPECT_THAT(collection.name(collection.index("bob")), Eq("bob"));
+
+ EXPECT_THAT(collection.index("bob"), Ne(collection.index("alice")));
+}
+
+TEST(NamesCollectionTest, AddedNamesHasIndexes) {
+ NamesCollection collection(std::vector<std::string>{});
+ collection.AddIfAbsent("alice");
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_TRUE(collection.HasName("alice"));
+ EXPECT_THAT(collection.name(collection.index("alice")), Eq("alice"));
+}
+
+TEST(NamesCollectionTest, AddBobDoesNotChangeAliceIndex) {
+ NamesCollection collection(std::vector<std::string>{"alice"});
+
+ size_t alice_index = collection.index("alice");
+
+ collection.AddIfAbsent("bob");
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(2)));
+ EXPECT_THAT(collection.index("alice"), Eq(alice_index));
+ EXPECT_THAT(collection.index("bob"), Ne(alice_index));
+}
+
+TEST(NamesCollectionTest, AddAliceSecondTimeDoesNotChangeIndex) {
+ NamesCollection collection(std::vector<std::string>{"alice"});
+
+ size_t alice_index = collection.index("alice");
+
+ EXPECT_THAT(collection.AddIfAbsent("alice"), Eq(alice_index));
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_THAT(collection.index("alice"), Eq(alice_index));
+}
+
+TEST(NamesCollectionTest, RemoveRemovesFromCollectionButNotIndex) {
+ NamesCollection collection(std::vector<std::string>{"alice", "bob"});
+
+ size_t bob_index = collection.index("bob");
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(2)));
+
+ EXPECT_THAT(collection.RemoveIfPresent("bob"),
+ Eq(absl::optional<size_t>(bob_index)));
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_FALSE(collection.HasName("bob"));
+
+ EXPECT_THAT(collection.index("bob"), Eq(bob_index));
+ EXPECT_THAT(collection.name(bob_index), Eq("bob"));
+}
+
+TEST(NamesCollectionTest, RemoveOfAliceDoesNotChangeBobIndex) {
+ NamesCollection collection(std::vector<std::string>{"alice", "bob"});
+
+ size_t alice_index = collection.index("alice");
+ size_t bob_index = collection.index("bob");
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(2)));
+
+ EXPECT_THAT(collection.RemoveIfPresent("alice"),
+ Eq(absl::optional<size_t>(alice_index)));
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_THAT(collection.index("bob"), Eq(bob_index));
+ EXPECT_THAT(collection.name(bob_index), Eq("bob"));
+}
+
+TEST(NamesCollectionTest, RemoveSecondTimeHasNoEffect) {
+ NamesCollection collection(std::vector<std::string>{"bob"});
+
+ size_t bob_index = collection.index("bob");
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_THAT(collection.RemoveIfPresent("bob"),
+ Eq(absl::optional<size_t>(bob_index)));
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(0)));
+ EXPECT_THAT(collection.RemoveIfPresent("bob"), Eq(absl::nullopt));
+}
+
+TEST(NamesCollectionTest, RemoveOfNotExistingHasNoEffect) {
+ NamesCollection collection(std::vector<std::string>{"bob"});
+
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+ EXPECT_THAT(collection.RemoveIfPresent("alice"), Eq(absl::nullopt));
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+}
+
+TEST(NamesCollectionTest, AddRemoveAddPreserveTheIndex) {
+ NamesCollection collection(std::vector<std::string>{});
+
+ size_t alice_index = collection.AddIfAbsent("alice");
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+
+ EXPECT_THAT(collection.RemoveIfPresent("alice"),
+ Eq(absl::optional<size_t>(alice_index)));
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(0)));
+
+ EXPECT_THAT(collection.AddIfAbsent("alice"), Eq(alice_index));
+ EXPECT_THAT(collection.index("alice"), Eq(alice_index));
+ EXPECT_THAT(collection.size(), Eq(static_cast<size_t>(1)));
+}
+
+TEST(NamesCollectionTest, GetKnownSizeReturnsForRemovedNames) {
+ NamesCollection collection(std::vector<std::string>{});
+
+ size_t alice_index = collection.AddIfAbsent("alice");
+ EXPECT_THAT(collection.GetKnownSize(), Eq(static_cast<size_t>(1)));
+
+ EXPECT_THAT(collection.RemoveIfPresent("alice"),
+ Eq(absl::optional<size_t>(alice_index)));
+ EXPECT_THAT(collection.GetKnownSize(), Eq(static_cast<size_t>(1)));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
new file mode 100644
index 0000000000..b958f4d027
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc
@@ -0,0 +1,272 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
+
+#include <cstdint>
+#include <cstring>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/logging.h"
+#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+QualityAnalyzingVideoDecoder::QualityAnalyzingVideoDecoder(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoDecoder> delegate,
+ EncodedImageDataExtractor* extractor,
+ VideoQualityAnalyzerInterface* analyzer)
+ : peer_name_(peer_name),
+ implementation_name_("AnalyzingDecoder-" +
+ std::string(delegate->ImplementationName())),
+ delegate_(std::move(delegate)),
+ extractor_(extractor),
+ analyzer_(analyzer) {
+ analyzing_callback_ = std::make_unique<DecoderCallback>(this);
+}
+QualityAnalyzingVideoDecoder::~QualityAnalyzingVideoDecoder() = default;
+
+bool QualityAnalyzingVideoDecoder::Configure(const Settings& settings) {
+ {
+ MutexLock lock(&mutex_);
+ codec_name_ = std::string(CodecTypeToPayloadString(settings.codec_type())) +
+ "_" + delegate_->GetDecoderInfo().implementation_name;
+ }
+ return delegate_->Configure(settings);
+}
+
+int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ // Image extractor extracts id from provided EncodedImage and also returns
+ // the image with the original buffer. Buffer can be modified in place, so
+ // owner of original buffer will be responsible for deleting it, or extractor
+ // can create a new buffer. In such case extractor will be responsible for
+ // deleting it.
+ EncodedImageExtractionResult out = extractor_->ExtractData(input_image);
+
+ if (out.discard) {
+ // To partly emulate behavior of Selective Forwarding Unit (SFU) in the
+ // test, on receiver side we will "discard" frames from irrelevant streams.
+    // When all encoded images were marked to be discarded, a black frame has
+    // to be returned. Because simulcast streams will be received by receiver as 3
+ // different independent streams we don't want that irrelevant streams
+ // affect video quality metrics and also we don't want to use CPU time to
+ // decode them to prevent regressions on relevant streams. Also we can't
+ // just drop frame, because in such case, receiving part will be confused
+ // with all frames missing and will request a key frame, which will result
+    // in extra load on the network and sender side. Because of it, discarded
+ // image will be always decoded as black frame and will be passed to
+ // callback directly without reaching decoder and video quality analyzer.
+ //
+ // For more details see QualityAnalyzingVideoEncoder.
+ return analyzing_callback_->IrrelevantSimulcastStreamDecoded(
+ out.id.value_or(VideoFrame::kNotSetId), input_image.Timestamp());
+ }
+
+ EncodedImage* origin_image;
+ {
+ MutexLock lock(&mutex_);
+ // Store id to be able to retrieve it in analyzing callback.
+ timestamp_to_frame_id_.insert({input_image.Timestamp(), out.id});
+ // Store encoded image to prevent its destruction while it is used in
+ // decoder.
+ origin_image = &(
+ decoding_images_.insert({input_image.Timestamp(), std::move(out.image)})
+ .first->second);
+ }
+ // We can safely dereference `origin_image`, because it can be removed from
+ // the map only after `delegate_` Decode method will be invoked. Image will
+ // be removed inside DecodedImageCallback, which can be done on separate
+ // thread.
+ analyzer_->OnFramePreDecode(
+ peer_name_, out.id.value_or(VideoFrame::kNotSetId), *origin_image);
+ int32_t result =
+ delegate_->Decode(*origin_image, missing_frames, render_time_ms);
+ if (result != WEBRTC_VIDEO_CODEC_OK) {
+ // If delegate decoder failed, then cleanup data for this image.
+ VideoQualityAnalyzerInterface::DecoderStats stats;
+ {
+ MutexLock lock(&mutex_);
+ timestamp_to_frame_id_.erase(input_image.Timestamp());
+ decoding_images_.erase(input_image.Timestamp());
+ stats.decoder_name = codec_name_;
+ }
+ analyzer_->OnDecoderError(
+ peer_name_, out.id.value_or(VideoFrame::kNotSetId), result, stats);
+ }
+ return result;
+}
+
+int32_t QualityAnalyzingVideoDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ analyzing_callback_->SetDelegateCallback(callback);
+ return delegate_->RegisterDecodeCompleteCallback(analyzing_callback_.get());
+}
+
+int32_t QualityAnalyzingVideoDecoder::Release() {
+ // Release decoder first. During release process it can still decode some
+ // frames, so we don't take a lock to prevent deadlock.
+ int32_t result = delegate_->Release();
+
+ MutexLock lock(&mutex_);
+ analyzing_callback_->SetDelegateCallback(nullptr);
+ timestamp_to_frame_id_.clear();
+ decoding_images_.clear();
+ return result;
+}
+
+VideoDecoder::DecoderInfo QualityAnalyzingVideoDecoder::GetDecoderInfo() const {
+ DecoderInfo info = delegate_->GetDecoderInfo();
+ info.implementation_name = implementation_name_;
+ return info;
+}
+
+const char* QualityAnalyzingVideoDecoder::ImplementationName() const {
+ return implementation_name_.c_str();
+}
+
+QualityAnalyzingVideoDecoder::DecoderCallback::DecoderCallback(
+ QualityAnalyzingVideoDecoder* decoder)
+ : decoder_(decoder), delegate_callback_(nullptr) {}
+QualityAnalyzingVideoDecoder::DecoderCallback::~DecoderCallback() = default;
+
+void QualityAnalyzingVideoDecoder::DecoderCallback::SetDelegateCallback(
+ DecodedImageCallback* delegate) {
+ MutexLock lock(&callback_mutex_);
+ delegate_callback_ = delegate;
+}
+
+// We have to implement all next 3 methods because we don't know which one
+// exactly is implemented in `delegate_callback_`, so we need to call the same
+// method on `delegate_callback_`, as was called on `this` callback.
+int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
+ VideoFrame& decodedImage) {
+ decoder_->OnFrameDecoded(&decodedImage, /*decode_time_ms=*/absl::nullopt,
+ /*qp=*/absl::nullopt);
+
+ MutexLock lock(&callback_mutex_);
+ RTC_DCHECK(delegate_callback_);
+ return delegate_callback_->Decoded(decodedImage);
+}
+
+int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
+ VideoFrame& decodedImage,
+ int64_t decode_time_ms) {
+ decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, /*qp=*/absl::nullopt);
+
+ MutexLock lock(&callback_mutex_);
+ RTC_DCHECK(delegate_callback_);
+ return delegate_callback_->Decoded(decodedImage, decode_time_ms);
+}
+
+void QualityAnalyzingVideoDecoder::DecoderCallback::Decoded(
+ VideoFrame& decodedImage,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) {
+ decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, qp);
+
+ MutexLock lock(&callback_mutex_);
+ RTC_DCHECK(delegate_callback_);
+ delegate_callback_->Decoded(decodedImage, decode_time_ms, qp);
+}
+
+int32_t
+QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded(
+ uint16_t frame_id,
+ uint32_t timestamp_ms) {
+ webrtc::VideoFrame dummy_frame =
+ webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(GetDummyFrameBuffer())
+ .set_timestamp_rtp(timestamp_ms)
+ .set_id(frame_id)
+ .build();
+ MutexLock lock(&callback_mutex_);
+ RTC_DCHECK(delegate_callback_);
+ delegate_callback_->Decoded(dummy_frame, absl::nullopt, absl::nullopt);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+QualityAnalyzingVideoDecoder::DecoderCallback::GetDummyFrameBuffer() {
+ if (!dummy_frame_buffer_) {
+ dummy_frame_buffer_ = CreateDummyFrameBuffer();
+ }
+
+ return dummy_frame_buffer_;
+}
+
+void QualityAnalyzingVideoDecoder::OnFrameDecoded(
+ VideoFrame* frame,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) {
+ absl::optional<uint16_t> frame_id;
+ std::string codec_name;
+ {
+ MutexLock lock(&mutex_);
+ auto it = timestamp_to_frame_id_.find(frame->timestamp());
+ if (it == timestamp_to_frame_id_.end()) {
+      // Ensure that we have info about this frame. It can happen that the
+      // decoder reported a failure to decode when we posted the frame to it,
+      // but then still invoked the callback for this frame.
+ RTC_LOG(LS_ERROR) << "QualityAnalyzingVideoDecoder::OnFrameDecoded: No "
+ "frame id for frame for frame->timestamp()="
+ << frame->timestamp();
+ return;
+ }
+ frame_id = it->second;
+ timestamp_to_frame_id_.erase(it);
+ decoding_images_.erase(frame->timestamp());
+ codec_name = codec_name_;
+ }
+ // Set frame id to the value, that was extracted from corresponding encoded
+ // image.
+ frame->set_id(frame_id.value_or(VideoFrame::kNotSetId));
+ VideoQualityAnalyzerInterface::DecoderStats stats;
+ stats.decoder_name = codec_name;
+ stats.decode_time_ms = decode_time_ms;
+ analyzer_->OnFrameDecoded(peer_name_, *frame, stats);
+}
+
+QualityAnalyzingVideoDecoderFactory::QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoDecoderFactory> delegate,
+ EncodedImageDataExtractor* extractor,
+ VideoQualityAnalyzerInterface* analyzer)
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
+ extractor_(extractor),
+ analyzer_(analyzer) {}
+QualityAnalyzingVideoDecoderFactory::~QualityAnalyzingVideoDecoderFactory() =
+ default;
+
+std::vector<SdpVideoFormat>
+QualityAnalyzingVideoDecoderFactory::GetSupportedFormats() const {
+ return delegate_->GetSupportedFormats();
+}
+
+std::unique_ptr<VideoDecoder>
+QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder(
+ const SdpVideoFormat& format) {
+ std::unique_ptr<VideoDecoder> decoder = delegate_->CreateVideoDecoder(format);
+ return std::make_unique<QualityAnalyzingVideoDecoder>(
+ peer_name_, std::move(decoder), extractor_, analyzer_);
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
new file mode 100644
index 0000000000..a86f4196b0
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h
@@ -0,0 +1,153 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_DECODER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_DECODER_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// QualityAnalyzingVideoDecoder is used to wrap origin video decoder and inject
+// VideoQualityAnalyzerInterface before and after decoder.
+//
+// QualityAnalyzingVideoDecoder propagates all calls to the origin decoder.
+// It registers its own DecodedImageCallback in the origin decoder and will
+// store user specified callback inside itself.
+//
+// When Decode(...) will be invoked, quality decoder first will extract frame id
+// from the passed EncodedImage with the EncodedImageDataExtractor specified in
+// constructor, then will call video quality analyzer, with correct
+// EncodedImage and only then will pass image to origin decoder.
+//
+// When origin decoder decodes the image it will call quality decoder's special
+// callback, where video analyzer will be called again and then decoded frame
+// will be passed to origin callback, provided by user.
+//
+// Quality decoder registers its own callback in origin decoder, at the same
+// time the user registers their callback in quality decoder.
+class QualityAnalyzingVideoDecoder : public VideoDecoder {
+ public:
+ QualityAnalyzingVideoDecoder(absl::string_view peer_name,
+ std::unique_ptr<VideoDecoder> delegate,
+ EncodedImageDataExtractor* extractor,
+ VideoQualityAnalyzerInterface* analyzer);
+ ~QualityAnalyzingVideoDecoder() override;
+
+ // Methods of VideoDecoder interface.
+ bool Configure(const Settings& settings) override;
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+ int32_t Release() override;
+ DecoderInfo GetDecoderInfo() const override;
+ const char* ImplementationName() const override;
+
+ private:
+ class DecoderCallback : public DecodedImageCallback {
+ public:
+ explicit DecoderCallback(QualityAnalyzingVideoDecoder* decoder);
+ ~DecoderCallback() override;
+
+ void SetDelegateCallback(DecodedImageCallback* delegate);
+
+ // Methods of DecodedImageCallback interface.
+ int32_t Decoded(VideoFrame& decodedImage) override;
+ int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) override;
+ void Decoded(VideoFrame& decodedImage,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) override;
+
+ int32_t IrrelevantSimulcastStreamDecoded(uint16_t frame_id,
+ uint32_t timestamp_ms);
+
+ private:
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> GetDummyFrameBuffer();
+
+ QualityAnalyzingVideoDecoder* const decoder_;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> dummy_frame_buffer_;
+
+ Mutex callback_mutex_;
+ DecodedImageCallback* delegate_callback_ RTC_GUARDED_BY(callback_mutex_);
+ };
+
+ void OnFrameDecoded(VideoFrame* frame,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp);
+
+ const std::string peer_name_;
+ const std::string implementation_name_;
+ std::unique_ptr<VideoDecoder> delegate_;
+ EncodedImageDataExtractor* const extractor_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+ std::unique_ptr<DecoderCallback> analyzing_callback_;
+
+ // VideoDecoder interface assumes async delivery of decoded video frames.
+  // This lock is used to protect shared state that has to be propagated
+ // from received EncodedImage to resulted VideoFrame.
+ Mutex mutex_;
+
+ // Name of the video codec type used. Ex: VP8, VP9, H264 etc.
+ std::string codec_name_ RTC_GUARDED_BY(mutex_);
+ std::map<uint32_t, absl::optional<uint16_t>> timestamp_to_frame_id_
+ RTC_GUARDED_BY(mutex_);
+ // Stores currently being decoded images by timestamp. Because
+  // EncodedImageDataExtractor can create a new copy of the EncodedImage we need
+  // to ensure that this image won't be deleted during async decoding. To do it,
+  // all images are put into this map and removed from here inside the callback.
+ std::map<uint32_t, EncodedImage> decoding_images_ RTC_GUARDED_BY(mutex_);
+};
+
+// Produces QualityAnalyzingVideoDecoder, which hold decoders, produced by
+// specified factory as delegates. Forwards all other calls to specified
+// factory.
+class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory {
+ public:
+ QualityAnalyzingVideoDecoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoDecoderFactory> delegate,
+ EncodedImageDataExtractor* extractor,
+ VideoQualityAnalyzerInterface* analyzer);
+ ~QualityAnalyzingVideoDecoderFactory() override;
+
+ // Methods of VideoDecoderFactory interface.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ const std::string peer_name_;
+ std::unique_ptr<VideoDecoderFactory> delegate_;
+ EncodedImageDataExtractor* const extractor_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_DECODER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
new file mode 100644
index 0000000000..e814ba88b7
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc
@@ -0,0 +1,403 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
+
+#include <cmath>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/video/video_codec_type.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using EmulatedSFUConfigMap =
+ ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap;
+
+constexpr size_t kMaxFrameInPipelineCount = 1000;
+constexpr double kNoMultiplier = 1.0;
+constexpr double kEps = 1e-6;
+
+std::pair<uint32_t, uint32_t> GetMinMaxBitratesBps(const VideoCodec& codec,
+ size_t spatial_idx) {
+ uint32_t min_bitrate = codec.minBitrate;
+ uint32_t max_bitrate = codec.maxBitrate;
+ if (spatial_idx < codec.numberOfSimulcastStreams &&
+ codec.codecType != VideoCodecType::kVideoCodecVP9) {
+ min_bitrate =
+ std::max(min_bitrate, codec.simulcastStream[spatial_idx].minBitrate);
+ max_bitrate =
+ std::min(max_bitrate, codec.simulcastStream[spatial_idx].maxBitrate);
+ }
+ if (codec.codecType == VideoCodecType::kVideoCodecVP9 &&
+ spatial_idx < codec.VP9().numberOfSpatialLayers) {
+ min_bitrate =
+ std::max(min_bitrate, codec.spatialLayers[spatial_idx].minBitrate);
+ max_bitrate =
+ std::min(max_bitrate, codec.spatialLayers[spatial_idx].maxBitrate);
+ }
+ RTC_DCHECK_GT(max_bitrate, min_bitrate);
+ return {min_bitrate * 1000, max_bitrate * 1000};
+}
+
+} // namespace
+
+QualityAnalyzingVideoEncoder::QualityAnalyzingVideoEncoder(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoEncoder> delegate,
+ double bitrate_multiplier,
+ EmulatedSFUConfigMap stream_to_sfu_config,
+ EncodedImageDataInjector* injector,
+ VideoQualityAnalyzerInterface* analyzer)
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
+ bitrate_multiplier_(bitrate_multiplier),
+ stream_to_sfu_config_(std::move(stream_to_sfu_config)),
+ injector_(injector),
+ analyzer_(analyzer),
+ mode_(SimulcastMode::kNormal),
+ delegate_callback_(nullptr) {}
+QualityAnalyzingVideoEncoder::~QualityAnalyzingVideoEncoder() = default;
+
+void QualityAnalyzingVideoEncoder::SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) {
+ // Ignored.
+}
+
+int32_t QualityAnalyzingVideoEncoder::InitEncode(
+ const VideoCodec* codec_settings,
+ const Settings& settings) {
+ MutexLock lock(&mutex_);
+ codec_settings_ = *codec_settings;
+ mode_ = SimulcastMode::kNormal;
+ absl::optional<InterLayerPredMode> inter_layer_pred_mode;
+ if (codec_settings->GetScalabilityMode().has_value()) {
+ inter_layer_pred_mode = ScalabilityModeToInterLayerPredMode(
+ *codec_settings->GetScalabilityMode());
+ } else if (codec_settings->codecType == kVideoCodecVP9) {
+ if (codec_settings->VP9().numberOfSpatialLayers > 1) {
+ inter_layer_pred_mode = codec_settings->VP9().interLayerPred;
+ }
+ }
+ if (inter_layer_pred_mode.has_value()) {
+ switch (*inter_layer_pred_mode) {
+ case InterLayerPredMode::kOn:
+ mode_ = SimulcastMode::kSVC;
+ break;
+ case InterLayerPredMode::kOnKeyPic:
+ mode_ = SimulcastMode::kKSVC;
+ break;
+ case InterLayerPredMode::kOff:
+ mode_ = SimulcastMode::kSimulcast;
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED()
+ << "Unknown InterLayerPredMode value " << *inter_layer_pred_mode;
+ break;
+ }
+ }
+ if (codec_settings->numberOfSimulcastStreams > 1) {
+ mode_ = SimulcastMode::kSimulcast;
+ }
+ return delegate_->InitEncode(codec_settings, settings);
+}
+
+int32_t QualityAnalyzingVideoEncoder::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ // We need to get a lock here because delegate_callback can be hypothetically
+ // accessed from different thread (encoder one) concurrently.
+ MutexLock lock(&mutex_);
+ delegate_callback_ = callback;
+ return delegate_->RegisterEncodeCompleteCallback(this);
+}
+
+int32_t QualityAnalyzingVideoEncoder::Release() {
+ // Release encoder first. During release process it can still encode some
+ // frames, so we don't take a lock to prevent deadlock.
+ int32_t result = delegate_->Release();
+
+ MutexLock lock(&mutex_);
+ delegate_callback_ = nullptr;
+ return result;
+}
+
+int32_t QualityAnalyzingVideoEncoder::Encode(
+ const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) {
+ {
+ MutexLock lock(&mutex_);
+ // Store id to be able to retrieve it in analyzing callback.
+ timestamp_to_frame_id_list_.push_back({frame.timestamp(), frame.id()});
+ // If this list is growing, it means that we are not receiving new encoded
+    // images from the encoder, so there should be a bug in setup or in the encoder.
+ RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount);
+ }
+ analyzer_->OnFramePreEncode(peer_name_, frame);
+ int32_t result = delegate_->Encode(frame, frame_types);
+ if (result != WEBRTC_VIDEO_CODEC_OK) {
+ // If origin encoder failed, then cleanup data for this frame.
+ {
+ MutexLock lock(&mutex_);
+      // The timestamp-frame_id pair may not be the last one, so we need to
+ // find it first and then remove. We will search from the end, because
+ // usually it will be the last or close to the last one.
+ auto it = timestamp_to_frame_id_list_.end();
+ while (it != timestamp_to_frame_id_list_.begin()) {
+ --it;
+ if (it->first == frame.timestamp()) {
+ timestamp_to_frame_id_list_.erase(it);
+ break;
+ }
+ }
+ }
+ analyzer_->OnEncoderError(peer_name_, frame, result);
+ }
+ return result;
+}
+
+void QualityAnalyzingVideoEncoder::SetRates(
+ const VideoEncoder::RateControlParameters& parameters) {
+ RTC_DCHECK_GT(bitrate_multiplier_, 0.0);
+ if (fabs(bitrate_multiplier_ - kNoMultiplier) < kEps) {
+ {
+ MutexLock lock(&mutex_);
+ bitrate_allocation_ = parameters.bitrate;
+ }
+ return delegate_->SetRates(parameters);
+ }
+
+ RateControlParameters adjusted_params = parameters;
+ {
+ MutexLock lock(&mutex_);
+ // Simulating encoder overshooting target bitrate, by configuring actual
+ // encoder too high. Take care not to adjust past limits of config,
+ // otherwise encoders may crash on DCHECK.
+ VideoBitrateAllocation multiplied_allocation;
+ for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
+ const uint32_t spatial_layer_bitrate_bps =
+ parameters.bitrate.GetSpatialLayerSum(si);
+ if (spatial_layer_bitrate_bps == 0) {
+ continue;
+ }
+
+ uint32_t min_bitrate_bps;
+ uint32_t max_bitrate_bps;
+ std::tie(min_bitrate_bps, max_bitrate_bps) =
+ GetMinMaxBitratesBps(codec_settings_, si);
+ double bitrate_multiplier = bitrate_multiplier_;
+ const uint32_t corrected_bitrate = rtc::checked_cast<uint32_t>(
+ bitrate_multiplier * spatial_layer_bitrate_bps);
+ if (corrected_bitrate < min_bitrate_bps) {
+ bitrate_multiplier = min_bitrate_bps / spatial_layer_bitrate_bps;
+ } else if (corrected_bitrate > max_bitrate_bps) {
+ bitrate_multiplier = max_bitrate_bps / spatial_layer_bitrate_bps;
+ }
+
+ for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
+ if (parameters.bitrate.HasBitrate(si, ti)) {
+ multiplied_allocation.SetBitrate(
+ si, ti,
+ rtc::checked_cast<uint32_t>(
+ bitrate_multiplier * parameters.bitrate.GetBitrate(si, ti)));
+ }
+ }
+ }
+
+ adjusted_params.bitrate = multiplied_allocation;
+ bitrate_allocation_ = adjusted_params.bitrate;
+ }
+ return delegate_->SetRates(adjusted_params);
+}
+
+VideoEncoder::EncoderInfo QualityAnalyzingVideoEncoder::GetEncoderInfo() const {
+ return delegate_->GetEncoderInfo();
+}
+
+// It is assumed, that encoded callback will be always invoked with encoded
+// images that correspond to the frames in the same sequence, that frames
+// arrived. In other words, assume we have frames F1, F2 and F3 and they have
+// corresponding encoded images I1, I2 and I3. In such case if we will call
+// encode first with F1, then with F2 and then with F3, then encoder callback
+// will be called first with all spatial layers for F1 (I1), then F2 (I2) and
+// then F3 (I3).
+//
+// Based on that, we will use a list of timestamp-frame_id pairs like this:
+// 1. If the current encoded image timestamp equals the timestamp in the front
+//    pair - pick the frame id from that pair.
+// 2. If the current encoded image timestamp doesn't equal the timestamp in the
+//    front pair - remove the front pair and go to step 1.
+EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) {
+ uint16_t frame_id;
+ bool discard = false;
+ uint32_t target_encode_bitrate = 0;
+ std::string codec_name;
+ {
+ MutexLock lock(&mutex_);
+ std::pair<uint32_t, uint16_t> timestamp_frame_id;
+ while (!timestamp_to_frame_id_list_.empty()) {
+ timestamp_frame_id = timestamp_to_frame_id_list_.front();
+ if (timestamp_frame_id.first == encoded_image.Timestamp()) {
+ break;
+ }
+ timestamp_to_frame_id_list_.pop_front();
+ }
+
+ // After the loop the first element should point to current `encoded_image`
+ // frame id. We don't remove it from the list, because there may be
+ // multiple spatial layers for this frame, so encoder can produce more
+ // encoded images with this timestamp. The first element will be removed
+ // when the next frame would be encoded and EncodedImageCallback would be
+ // called with the next timestamp.
+
+ if (timestamp_to_frame_id_list_.empty()) {
+      // Ensure that we have info about this frame. It can happen that the
+      // encoder reported a failure to encode when we posted the frame to it,
+      // but then still invoked the callback for this frame.
+ RTC_LOG(LS_ERROR) << "QualityAnalyzingVideoEncoder::OnEncodedImage: No "
+ "frame id for encoded_image.Timestamp()="
+ << encoded_image.Timestamp();
+ return EncodedImageCallback::Result(
+ EncodedImageCallback::Result::Error::OK);
+ }
+ frame_id = timestamp_frame_id.second;
+
+ discard = ShouldDiscard(frame_id, encoded_image);
+ if (!discard) {
+ target_encode_bitrate = bitrate_allocation_.GetSpatialLayerSum(
+ encoded_image.SpatialIndex().value_or(0));
+ }
+ codec_name =
+ std::string(CodecTypeToPayloadString(codec_settings_.codecType)) + "_" +
+ delegate_->GetEncoderInfo().implementation_name;
+ }
+
+ VideoQualityAnalyzerInterface::EncoderStats stats;
+ stats.encoder_name = codec_name;
+ stats.target_encode_bitrate = target_encode_bitrate;
+ stats.qp = encoded_image.qp_;
+ analyzer_->OnFrameEncoded(peer_name_, frame_id, encoded_image, stats,
+ discard);
+
+ // Image data injector injects frame id and discard flag into provided
+ // EncodedImage and returns the image with a) modified original buffer (in
+ // such case the current owner of the buffer will be responsible for deleting
+ // it) or b) a new buffer (in such case injector will be responsible for
+ // deleting it).
+ const EncodedImage& image =
+ injector_->InjectData(frame_id, discard, encoded_image);
+ {
+ MutexLock lock(&mutex_);
+ RTC_DCHECK(delegate_callback_);
+ return delegate_callback_->OnEncodedImage(image, codec_specific_info);
+ }
+}
+
+void QualityAnalyzingVideoEncoder::OnDroppedFrame(
+ EncodedImageCallback::DropReason reason) {
+ MutexLock lock(&mutex_);
+ analyzer_->OnFrameDropped(peer_name_, reason);
+ RTC_DCHECK(delegate_callback_);
+ delegate_callback_->OnDroppedFrame(reason);
+}
+
+bool QualityAnalyzingVideoEncoder::ShouldDiscard(
+ uint16_t frame_id,
+ const EncodedImage& encoded_image) {
+ std::string stream_label = analyzer_->GetStreamLabel(frame_id);
+ EmulatedSFUConfigMap::mapped_type emulated_sfu_config =
+ stream_to_sfu_config_[stream_label];
+
+ if (!emulated_sfu_config)
+ return false;
+
+ int cur_spatial_index = encoded_image.SpatialIndex().value_or(0);
+ int cur_temporal_index = encoded_image.TemporalIndex().value_or(0);
+
+ if (emulated_sfu_config->target_temporal_index &&
+ cur_temporal_index > *emulated_sfu_config->target_temporal_index)
+ return true;
+
+ if (emulated_sfu_config->target_layer_index) {
+ switch (mode_) {
+ case SimulcastMode::kSimulcast:
+        // In simulcast mode only encoded images with the required spatial
+        // index are of interest, so all others have to be discarded.
+ return cur_spatial_index != *emulated_sfu_config->target_layer_index;
+ case SimulcastMode::kSVC:
+ // In SVC mode encoded images with spatial indexes that are equal or
+ // less than required one are interesting, so all above have to be
+ // discarded.
+ return cur_spatial_index > *emulated_sfu_config->target_layer_index;
+ case SimulcastMode::kKSVC:
+ // In KSVC mode for key frame encoded images with spatial indexes that
+ // are equal or less than required one are interesting, so all above
+ // have to be discarded. For other frames only required spatial index
+ // is interesting, so all others except the ones depending on the
+ // keyframes can be discarded. There's no good test for that, so we keep
+ // all of temporal layer 0 for now.
+ if (encoded_image._frameType == VideoFrameType::kVideoFrameKey ||
+ cur_temporal_index == 0)
+ return cur_spatial_index > *emulated_sfu_config->target_layer_index;
+ return cur_spatial_index != *emulated_sfu_config->target_layer_index;
+ case SimulcastMode::kNormal:
+ RTC_DCHECK_NOTREACHED() << "Analyzing encoder is in kNormal mode, but "
+ "target_layer_index is set";
+ }
+ }
+ return false;
+}
+
+QualityAnalyzingVideoEncoderFactory::QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoEncoderFactory> delegate,
+ double bitrate_multiplier,
+ EmulatedSFUConfigMap stream_to_sfu_config,
+ EncodedImageDataInjector* injector,
+ VideoQualityAnalyzerInterface* analyzer)
+ : peer_name_(peer_name),
+ delegate_(std::move(delegate)),
+ bitrate_multiplier_(bitrate_multiplier),
+ stream_to_sfu_config_(std::move(stream_to_sfu_config)),
+ injector_(injector),
+ analyzer_(analyzer) {}
+QualityAnalyzingVideoEncoderFactory::~QualityAnalyzingVideoEncoderFactory() =
+ default;
+
+std::vector<SdpVideoFormat>
+QualityAnalyzingVideoEncoderFactory::GetSupportedFormats() const {
+ return delegate_->GetSupportedFormats();
+}
+
+VideoEncoderFactory::CodecSupport
+QualityAnalyzingVideoEncoderFactory::QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const {
+ return delegate_->QueryCodecSupport(format, scalability_mode);
+}
+
+std::unique_ptr<VideoEncoder>
+QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder(
+ const SdpVideoFormat& format) {
+ return std::make_unique<QualityAnalyzingVideoEncoder>(
+ peer_name_, delegate_->CreateVideoEncoder(format), bitrate_multiplier_,
+ stream_to_sfu_config_, injector_, analyzer_);
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
new file mode 100644
index 0000000000..4adeacc0cd
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h
@@ -0,0 +1,194 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_ENCODER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_ENCODER_H_
+
+#include <list>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// QualityAnalyzingVideoEncoder is used to wrap origin video encoder and inject
+// VideoQualityAnalyzerInterface before and after encoder.
+//
+// QualityAnalyzingVideoEncoder propagates all calls to the origin encoder.
+// It registers its own EncodedImageCallback in the origin encoder and will
+// store user specified callback inside itself.
+//
+// When Encode(...) will be invoked, quality encoder first calls video quality
+// analyzer with original frame, then encodes frame with original encoder.
+//
+// When origin encoder encodes the image it will call quality encoder's special
+// callback, where video analyzer will be called again and then frame id will be
+// injected into EncodedImage with passed EncodedImageDataInjector. Then new
+// EncodedImage will be passed to origin callback, provided by user.
+//
+// Quality encoder registers its own callback in origin encoder, at the same
+// time the user registers their callback in quality encoder.
+class QualityAnalyzingVideoEncoder : public VideoEncoder,
+ public EncodedImageCallback {
+ public:
+ using EmulatedSFUConfigMap =
+ std::map<std::string, absl::optional<EmulatedSFUConfig>>;
+
+ QualityAnalyzingVideoEncoder(absl::string_view peer_name,
+ std::unique_ptr<VideoEncoder> delegate,
+ double bitrate_multiplier,
+ EmulatedSFUConfigMap stream_to_sfu_config,
+ EncodedImageDataInjector* injector,
+ VideoQualityAnalyzerInterface* analyzer);
+ ~QualityAnalyzingVideoEncoder() override;
+
+ // Methods of VideoEncoder interface.
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override;
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) override;
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) override;
+ void SetRates(const VideoEncoder::RateControlParameters& parameters) override;
+ EncoderInfo GetEncoderInfo() const override;
+
+ // Methods of EncodedImageCallback interface.
+ EncodedImageCallback::Result OnEncodedImage(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) override;
+ void OnDroppedFrame(DropReason reason) override;
+
+ private:
+ enum SimulcastMode {
+ // In this mode encoder assumes not more than 1 encoded image per video
+ // frame
+ kNormal,
+
+ // Next modes are to test video conference behavior. For conference sender
+ // will send multiple spatial layers/simulcast streams for single video
+ // track and there is some Selective Forwarding Unit (SFU), that forwards
+ // only best one, that will pass through downlink to the receiver.
+ //
+ // Here this behavior will be partly emulated. Sender will send all spatial
+ // layers/simulcast streams and then some of them will be filtered out on
+ // the receiver side. During test setup user can specify which spatial
+  // layer/simulcast stream is required, which will simulate which spatial
+  // layer/simulcast stream would be chosen by the SFU in the real world. Then
+ // sender will mark encoded images for all spatial layers above required or
+ // all simulcast streams except required as to be discarded and on receiver
+ // side they will be discarded in quality analyzing decoder and won't be
+ // passed into delegate decoder.
+ //
+ // If the sender for some reasons won't send specified spatial layer, then
+ // receiver still will fall back on lower spatial layers. But for simulcast
+ // streams if required one won't be sent, receiver will assume all frames
+ // in that period as dropped and will experience video freeze.
+ //
+ // Test based on this simulation will be used to evaluate video quality
+ // of concrete spatial layers/simulcast streams and also check distribution
+ // of bandwidth between spatial layers/simulcast streams by BWE.
+
+ // In this mode encoder assumes that for each frame simulcast encoded
+ // images will be produced. So all simulcast streams except required will
+ // be marked as to be discarded in decoder and won't reach video quality
+ // analyzer.
+ kSimulcast,
+ // In this mode encoder assumes that for each frame encoded images for
+ // different spatial layers will be produced. So all spatial layers above
+ // required will be marked to be discarded in decoder and won't reach
+ // video quality analyzer.
+ kSVC,
+ // In this mode encoder assumes that for each frame encoded images for
+ // different spatial layers will be produced. Compared to kSVC mode
+ // spatial layers that are above required will be marked to be discarded
+ // only for key frames and for regular frames all except required spatial
+ // layer will be marked as to be discarded in decoder and won't reach video
+ // quality analyzer.
+ kKSVC
+ };
+
+ bool ShouldDiscard(uint16_t frame_id, const EncodedImage& encoded_image)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+ const std::string peer_name_;
+ std::unique_ptr<VideoEncoder> delegate_;
+ const double bitrate_multiplier_;
+  // Contains a mapping from stream label to an optional emulated SFU config.
+  // For a stream label "Foo" the mapped value means:
+  // 1. `absl::nullopt` - all simulcast/SVC streams are required
+  // 2. A concrete value - only that particular simulcast/SVC stream has to be
+  //    analyzed.
+ EmulatedSFUConfigMap stream_to_sfu_config_;
+ EncodedImageDataInjector* const injector_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+
+  // VideoEncoder interface assumes async delivery of encoded images.
+  // This lock is used to protect shared state that has to be propagated
+  // from the received VideoFrame to the resulting EncodedImage.
+ Mutex mutex_;
+
+ VideoCodec codec_settings_ RTC_GUARDED_BY(mutex_);
+ SimulcastMode mode_ RTC_GUARDED_BY(mutex_);
+ EncodedImageCallback* delegate_callback_ RTC_GUARDED_BY(mutex_);
+ std::list<std::pair<uint32_t, uint16_t>> timestamp_to_frame_id_list_
+ RTC_GUARDED_BY(mutex_);
+ VideoBitrateAllocation bitrate_allocation_ RTC_GUARDED_BY(mutex_);
+};
+
+// Produces QualityAnalyzingVideoEncoder, which holds encoders produced by the
+// specified factory as delegates. Forwards all other calls to the specified
+// factory.
+class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ QualityAnalyzingVideoEncoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoEncoderFactory> delegate,
+ double bitrate_multiplier,
+ QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap stream_to_sfu_config,
+ EncodedImageDataInjector* injector,
+ VideoQualityAnalyzerInterface* analyzer);
+ ~QualityAnalyzingVideoEncoderFactory() override;
+
+ // Methods of VideoEncoderFactory interface.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ VideoEncoderFactory::CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const override;
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ const std::string peer_name_;
+ std::unique_ptr<VideoEncoderFactory> delegate_;
+ const double bitrate_multiplier_;
+ QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap stream_to_sfu_config_;
+ EncodedImageDataInjector* const injector_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_QUALITY_ANALYZING_VIDEO_ENCODER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc
new file mode 100644
index 0000000000..7a73b9f4f1
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+constexpr char kIrrelatedSimulcastStreamFrameData[] = "Dummy!";
+
+} // namespace
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateDummyFrameBuffer() {
+  // Use an I420 buffer here as the default one, supported by all codecs.
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ webrtc::I420Buffer::Create(2, 2);
+ memcpy(buffer->MutableDataY(), kIrrelatedSimulcastStreamFrameData, 2);
+ memcpy(buffer->MutableDataY() + buffer->StrideY(),
+ kIrrelatedSimulcastStreamFrameData + 2, 2);
+ memcpy(buffer->MutableDataU(), kIrrelatedSimulcastStreamFrameData + 4, 1);
+ memcpy(buffer->MutableDataV(), kIrrelatedSimulcastStreamFrameData + 5, 1);
+ return buffer;
+}
+
+bool IsDummyFrame(const webrtc::VideoFrame& video_frame) {
+ if (video_frame.width() != 2 || video_frame.height() != 2) {
+ return false;
+ }
+ rtc::scoped_refptr<webrtc::I420BufferInterface> buffer =
+ video_frame.video_frame_buffer()->ToI420();
+ if (memcmp(buffer->DataY(), kIrrelatedSimulcastStreamFrameData, 2) != 0) {
+ return false;
+ }
+ if (memcmp(buffer->DataY() + buffer->StrideY(),
+ kIrrelatedSimulcastStreamFrameData + 2, 2) != 0) {
+ return false;
+ }
+ if (memcmp(buffer->DataU(), kIrrelatedSimulcastStreamFrameData + 4, 1) != 0) {
+ return false;
+ }
+ if (memcmp(buffer->DataV(), kIrrelatedSimulcastStreamFrameData + 5, 1) != 0) {
+ return false;
+ }
+ return true;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h
new file mode 100644
index 0000000000..8ecfae7385
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_
+
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Creates a special video frame buffer that should be used to create frames
+// during Selective Forwarding Unit (SFU) emulation. Such frames are used when
+// original was discarded and some frame is required to be passed upstream
+// to make WebRTC pipeline happy and not request key frame on the received
+// stream due to lack of incoming frames.
+rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateDummyFrameBuffer();
+
+// Tests if provided frame contains a buffer created by
+// `CreateDummyFrameBuffer`.
+bool IsDummyFrame(const webrtc::VideoFrame& video_frame);
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_SIMULCAST_DUMMY_BUFFER_HELPER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc
new file mode 100644
index 0000000000..db1030232d
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/random.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+uint8_t RandByte(Random& random) {
+ return random.Rand(255);
+}
+
+VideoFrame CreateRandom2x2VideoFrame(uint16_t id, Random& random) {
+ rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(2, 2);
+
+ uint8_t data[6] = {RandByte(random), RandByte(random), RandByte(random),
+ RandByte(random), RandByte(random), RandByte(random)};
+
+ memcpy(buffer->MutableDataY(), data, 2);
+ memcpy(buffer->MutableDataY() + buffer->StrideY(), data + 2, 2);
+ memcpy(buffer->MutableDataU(), data + 4, 1);
+ memcpy(buffer->MutableDataV(), data + 5, 1);
+
+ return VideoFrame::Builder()
+ .set_id(id)
+ .set_video_frame_buffer(buffer)
+ .set_timestamp_us(1)
+ .build();
+}
+
+TEST(CreateDummyFrameBufferTest, CreatedBufferIsDummy) {
+ VideoFrame dummy_frame = VideoFrame::Builder()
+ .set_video_frame_buffer(CreateDummyFrameBuffer())
+ .build();
+
+ EXPECT_TRUE(IsDummyFrame(dummy_frame));
+}
+
+TEST(IsDummyFrameTest, NotEveryFrameIsDummy) {
+ Random random(/*seed=*/100);
+ VideoFrame frame = CreateRandom2x2VideoFrame(1, random);
+ EXPECT_FALSE(IsDummyFrame(frame));
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
new file mode 100644
index 0000000000..ccd2f03537
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h"
+
+#include <algorithm>
+#include <cstddef>
+
+#include "absl/memory/memory.h"
+#include "api/video/encoded_image.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+SingleProcessEncodedImageDataInjector::SingleProcessEncodedImageDataInjector() =
+ default;
+SingleProcessEncodedImageDataInjector::
+ ~SingleProcessEncodedImageDataInjector() = default;
+
+EncodedImage SingleProcessEncodedImageDataInjector::InjectData(
+ uint16_t id,
+ bool discard,
+ const EncodedImage& source) {
+ RTC_CHECK(source.size() >= ExtractionInfo::kUsedBufferSize);
+
+ ExtractionInfo info;
+ info.discard = discard;
+ size_t insertion_pos = source.size() - ExtractionInfo::kUsedBufferSize;
+ memcpy(info.origin_data, &source.data()[insertion_pos],
+ ExtractionInfo::kUsedBufferSize);
+ {
+ MutexLock lock(&lock_);
+ // Will create new one if missed.
+ ExtractionInfoVector& ev = extraction_cache_[id];
+ info.sub_id = ev.next_sub_id++;
+ ev.infos[info.sub_id] = info;
+ }
+
+ auto buffer = EncodedImageBuffer::Create(source.data(), source.size());
+ buffer->data()[insertion_pos] = id & 0x00ff;
+ buffer->data()[insertion_pos + 1] = (id & 0xff00) >> 8;
+ buffer->data()[insertion_pos + 2] = info.sub_id;
+
+ EncodedImage out = source;
+ out.SetEncodedData(buffer);
+ return out;
+}
+
+void SingleProcessEncodedImageDataInjector::AddParticipantInCall() {
+ MutexLock crit(&lock_);
+ expected_receivers_count_++;
+}
+
+void SingleProcessEncodedImageDataInjector::RemoveParticipantInCall() {
+ MutexLock crit(&lock_);
+ expected_receivers_count_--;
+  // Now we need to go over `extraction_cache_` and remove frames which have
+  // been received by all `expected_receivers_count_` receivers.
+ for (auto& [frame_id, extraction_infos] : extraction_cache_) {
+ for (auto it = extraction_infos.infos.begin();
+ it != extraction_infos.infos.end();) {
+ // Frame is received if `received_count` equals to
+ // `expected_receivers_count_`.
+ if (it->second.received_count == expected_receivers_count_) {
+ it = extraction_infos.infos.erase(it);
+ } else {
+ ++it;
+ }
+ }
+ }
+}
+
+EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData(
+ const EncodedImage& source) {
+ size_t size = source.size();
+ auto buffer = EncodedImageBuffer::Create(source.data(), source.size());
+ EncodedImage out = source;
+ out.SetEncodedData(buffer);
+
+ std::vector<size_t> frame_sizes;
+ std::vector<size_t> frame_sl_index;
+ size_t max_spatial_index = out.SpatialIndex().value_or(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ auto frame_size = source.SpatialLayerFrameSize(i);
+ if (frame_size.value_or(0)) {
+ frame_sl_index.push_back(i);
+ frame_sizes.push_back(frame_size.value());
+ }
+ }
+ if (frame_sizes.empty()) {
+ frame_sizes.push_back(size);
+ }
+
+ size_t prev_frames_size = 0;
+ absl::optional<uint16_t> id = absl::nullopt;
+ bool discard = true;
+ std::vector<ExtractionInfo> extraction_infos;
+ for (size_t frame_size : frame_sizes) {
+ size_t insertion_pos =
+ prev_frames_size + frame_size - ExtractionInfo::kUsedBufferSize;
+ // Extract frame id from first 2 bytes starting from insertion pos.
+ uint16_t next_id = buffer->data()[insertion_pos] +
+ (buffer->data()[insertion_pos + 1] << 8);
+    // Extract frame sub id from the 3rd byte starting from insertion pos.
+ uint8_t sub_id = buffer->data()[insertion_pos + 2];
+ RTC_CHECK(!id || *id == next_id)
+ << "Different frames encoded into single encoded image: " << *id
+ << " vs " << next_id;
+ id = next_id;
+ ExtractionInfo info;
+ {
+ MutexLock lock(&lock_);
+ auto ext_vector_it = extraction_cache_.find(next_id);
+ RTC_CHECK(ext_vector_it != extraction_cache_.end())
+ << "Unknown frame_id=" << next_id;
+
+ auto info_it = ext_vector_it->second.infos.find(sub_id);
+ RTC_CHECK(info_it != ext_vector_it->second.infos.end())
+ << "Unknown sub_id=" << sub_id << " for frame_id=" << next_id;
+ info_it->second.received_count++;
+ info = info_it->second;
+ if (info.received_count == expected_receivers_count_) {
+ ext_vector_it->second.infos.erase(info_it);
+ }
+ }
+ // We need to discard encoded image only if all concatenated encoded images
+ // have to be discarded.
+ discard = discard && info.discard;
+
+ extraction_infos.push_back(info);
+ prev_frames_size += frame_size;
+ }
+ RTC_CHECK(id);
+
+ if (discard) {
+ out.set_size(0);
+ for (size_t i = 0; i <= max_spatial_index; ++i) {
+ out.SetSpatialLayerFrameSize(i, 0);
+ }
+ return EncodedImageExtractionResult{*id, out, true};
+ }
+
+ // Make a pass from begin to end to restore origin payload and erase discarded
+ // encoded images.
+ size_t pos = 0;
+ for (size_t frame_index = 0; frame_index < frame_sizes.size();
+ ++frame_index) {
+ RTC_CHECK(pos < size);
+ const size_t frame_size = frame_sizes[frame_index];
+ const ExtractionInfo& info = extraction_infos[frame_index];
+ if (info.discard) {
+      // If this encoded image is marked to be discarded - erase its payload
+      // from the buffer.
+ memmove(&buffer->data()[pos], &buffer->data()[pos + frame_size],
+ size - pos - frame_size);
+ RTC_CHECK_LT(frame_index, frame_sl_index.size())
+ << "codec doesn't support discard option or the image, that was "
+ "supposed to be discarded, is lost";
+ out.SetSpatialLayerFrameSize(frame_sl_index[frame_index], 0);
+ size -= frame_size;
+ } else {
+ memcpy(
+ &buffer->data()[pos + frame_size - ExtractionInfo::kUsedBufferSize],
+ info.origin_data, ExtractionInfo::kUsedBufferSize);
+ pos += frame_size;
+ }
+ }
+ out.set_size(pos);
+
+ return EncodedImageExtractionResult{*id, out, discard};
+}
+
+SingleProcessEncodedImageDataInjector::ExtractionInfoVector::
+ ExtractionInfoVector() = default;
+SingleProcessEncodedImageDataInjector::ExtractionInfoVector::
+ ~ExtractionInfoVector() = default;
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
new file mode 100644
index 0000000000..1082440e2f
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_SINGLE_PROCESS_ENCODED_IMAGE_DATA_INJECTOR_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_SINGLE_PROCESS_ENCODED_IMAGE_DATA_INJECTOR_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/video/encoded_image.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Based on assumption that all call participants are in the same OS process
+// and uses same QualityAnalyzingVideoContext to obtain
+// EncodedImageDataInjector.
+//
+// To inject the frame id and discard flag into an EncodedImage, the injector
+// uses the 3rd-to-last and 2nd-to-last bytes of the EncodedImage payload. The
+// last byte holds the frame sub id, required to distinguish different spatial
+// layers. The original data from these 3 bytes is stored inside the injector's
+// internal storage and is restored during the extraction phase.
+//
+// This injector won't add any extra overhead into the EncodedImage payload and
+// supports frames with any payload size. It also assumes that every
+// EncodedImage payload size is greater than or equal to 3 bytes.
+//
+// This injector doesn't support video frames/encoded images without frame ID.
+class SingleProcessEncodedImageDataInjector
+ : public EncodedImageDataPropagator {
+ public:
+ SingleProcessEncodedImageDataInjector();
+ ~SingleProcessEncodedImageDataInjector() override;
+
+ // Id and discard flag will be injected into EncodedImage buffer directly.
+ // This buffer won't be fully copied, so `source` image buffer will be also
+ // changed.
+ EncodedImage InjectData(uint16_t id,
+ bool discard,
+ const EncodedImage& source) override;
+
+ void Start(int expected_receivers_count) override {
+ MutexLock crit(&lock_);
+ expected_receivers_count_ = expected_receivers_count;
+ }
+ void AddParticipantInCall() override;
+ void RemoveParticipantInCall() override;
+ EncodedImageExtractionResult ExtractData(const EncodedImage& source) override;
+
+ private:
+ // Contains data required to extract frame id from EncodedImage and restore
+ // original buffer.
+ struct ExtractionInfo {
+    // Number of bytes at the end of the EncodedImage buffer that will
+    // be used to store frame id and sub id.
+ // Frame sub id to distinguish encoded images for different spatial layers.
+ uint8_t sub_id;
+    // Flag showing whether this encoded image should be discarded by the
+    // analyzing decoder due to a not-required spatial layer/simulcast stream.
+ bool discard;
+    // Original data from the last 3 bytes of the encoded image's payload.
+ uint8_t origin_data[ExtractionInfo::kUsedBufferSize];
+ // Count of how many times this frame was received.
+ int received_count = 0;
+ };
+
+ struct ExtractionInfoVector {
+ ExtractionInfoVector();
+ ~ExtractionInfoVector();
+
+    // Next sub id to be used for this frame id.
+ uint8_t next_sub_id = 0;
+ std::map<uint8_t, ExtractionInfo> infos;
+ };
+
+ Mutex lock_;
+ int expected_receivers_count_ RTC_GUARDED_BY(lock_);
+  // Stores a mapping from frame id to the extraction info for the spatial
+  // layers of this frame id. There can be a lot of entries, because if a frame
+  // was dropped we can't clean it up: we won't receive a signal on the
+  // decoder side about that frame. In such a case the entry will be replaced
+  // when the sub id wraps around.
+ std::map<uint16_t, ExtractionInfoVector> extraction_cache_
+ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_SINGLE_PROCESS_ENCODED_IMAGE_DATA_INJECTOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
new file mode 100644
index 0000000000..f6fa40455a
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc
@@ -0,0 +1,445 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h"
+
+#include <utility>
+
+#include "api/video/encoded_image.h"
+#include "rtc_base/buffer.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// Creates an EncodedImageBuffer of `n` bytes where byte `i` holds the value
+// `x + i` (truncated to uint8_t), giving each buffer a recognizable,
+// position-dependent payload for the tests below.
+rtc::scoped_refptr<EncodedImageBuffer>
+CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) {
+  auto buffer = EncodedImageBuffer::Create(n);
+  for (size_t i = 0; i < n; ++i) {
+    buffer->data()[i] = static_cast<uint8_t>(x + i);
+  }
+  return buffer;
+}
+
+// Wraps CreateEncodedImageBufferOfSizeNFilledWithValuesFromX() into an
+// EncodedImage with otherwise default metadata.
+EncodedImage CreateEncodedImageOfSizeNFilledWithValuesFromX(size_t n,
+                                                            uint8_t x) {
+  EncodedImage image;
+  image.SetEncodedData(
+      CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(n, x));
+  return image;
+}
+
+// Returns a copy of `source` whose payload is backed by a newly allocated
+// buffer, so each ExtractData() call below operates on independent bytes and
+// can't mutate `source` through a shared buffer.
+EncodedImage DeepCopyEncodedImage(const EncodedImage& source) {
+  EncodedImage copy = source;
+  copy.SetEncodedData(EncodedImageBuffer::Create(source.data(), source.size()));
+  return copy;
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardFalse) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImageExtractionResult out =
+ injector.ExtractData(injector.InjectData(512, false, source));
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardTrue) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImageExtractionResult out =
+ injector.ExtractData(injector.InjectData(512, true, source));
+ EXPECT_EQ(out.id, 512);
+ EXPECT_TRUE(out.discard);
+ EXPECT_EQ(out.image.size(), 0ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest,
+ InjectWithUnsetSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source);
+ intermediate.SetSpatialIndex(2);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest,
+ InjectWithZeroSpatialLayerSizes) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImage intermediate = injector.InjectData(512, false, source);
+ intermediate.SetSpatialIndex(2);
+ intermediate.SetSpatialLayerFrameSize(0, 0);
+ intermediate.SetSpatialLayerFrameSize(1, 0);
+ intermediate.SetSpatialLayerFrameSize(2, 0);
+
+ EncodedImageExtractionResult out = injector.ExtractData(intermediate);
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ for (int i = 0; i < 3; ++i) {
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, Inject3Extract3) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ // 1st frame
+ EncodedImage source1 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source1.SetTimestamp(123456710);
+ // 2nd frame 1st spatial layer
+ EncodedImage source2 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
+ source2.SetTimestamp(123456720);
+ // 2nd frame 2nd spatial layer
+ EncodedImage source3 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
+ source3.SetTimestamp(123456720);
+
+ EncodedImage intermediate1 = injector.InjectData(510, false, source1);
+ EncodedImage intermediate2 = injector.InjectData(520, true, source2);
+ EncodedImage intermediate3 = injector.InjectData(520, false, source3);
+
+ // Extract ids in different order.
+ EncodedImageExtractionResult out3 = injector.ExtractData(intermediate3);
+ EncodedImageExtractionResult out1 = injector.ExtractData(intermediate1);
+ EncodedImageExtractionResult out2 = injector.ExtractData(intermediate2);
+
+ EXPECT_EQ(out1.id, 510);
+ EXPECT_FALSE(out1.discard);
+ EXPECT_EQ(out1.image.size(), 10ul);
+ EXPECT_EQ(out1.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out1.image.data()[i], i + 1);
+ }
+ EXPECT_EQ(out2.id, 520);
+ EXPECT_TRUE(out2.discard);
+ EXPECT_EQ(out2.image.size(), 0ul);
+ EXPECT_EQ(out2.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ EXPECT_EQ(out3.id, 520);
+ EXPECT_FALSE(out3.discard);
+ EXPECT_EQ(out3.image.size(), 10ul);
+ EXPECT_EQ(out3.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out3.image.data()[i], i + 21);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractFromConcatenated) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/1);
+
+ EncodedImage source1 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source1.SetTimestamp(123456710);
+ EncodedImage source2 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
+ source2.SetTimestamp(123456710);
+ EncodedImage source3 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
+ source3.SetTimestamp(123456710);
+
+ // Inject id into 3 images with same frame id.
+ EncodedImage intermediate1 = injector.InjectData(512, false, source1);
+ EncodedImage intermediate2 = injector.InjectData(512, true, source2);
+ EncodedImage intermediate3 = injector.InjectData(512, false, source3);
+
+ // Concatenate them into single encoded image, like it can be done in jitter
+ // buffer.
+ size_t concatenated_length =
+ intermediate1.size() + intermediate2.size() + intermediate3.size();
+ rtc::Buffer concatenated_buffer;
+ concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size());
+ concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size());
+ concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
+ EncodedImage concatenated;
+ concatenated.SetEncodedData(EncodedImageBuffer::Create(
+ concatenated_buffer.data(), concatenated_length));
+ concatenated.SetSpatialIndex(2);
+ concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+ concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+ concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
+
+ // Extract frame id from concatenated image
+ EncodedImageExtractionResult out = injector.ExtractData(concatenated);
+
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 2 * 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ EXPECT_EQ(out.image.data()[i + 10], i + 21);
+ }
+ EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(1).value_or(0), 0ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(2).value_or(0), 10ul);
+}
+
+// All spatial layers are marked to be discarded, so the concatenated frame
+// must come back empty, with `discard` set and all per-layer sizes zeroed.
+// Note: the suite name carries the `Test` suffix so this case stays in the
+// same gtest suite as every other test in this file (it previously used the
+// bare `SingleProcessEncodedImageDataInjector` name, splitting the suite).
+TEST(SingleProcessEncodedImageDataInjectorTest,
+     InjectExtractFromConcatenatedAllDiscarded) {
+  SingleProcessEncodedImageDataInjector injector;
+  injector.Start(/*expected_receivers_count=*/1);
+
+  EncodedImage source1 =
+      CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+  source1.SetTimestamp(123456710);
+  EncodedImage source2 =
+      CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11);
+  source2.SetTimestamp(123456710);
+  EncodedImage source3 =
+      CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21);
+  source3.SetTimestamp(123456710);
+
+  // Inject id into 3 images with same frame id.
+  EncodedImage intermediate1 = injector.InjectData(512, true, source1);
+  EncodedImage intermediate2 = injector.InjectData(512, true, source2);
+  EncodedImage intermediate3 = injector.InjectData(512, true, source3);
+
+  // Concatenate them into single encoded image, like it can be done in jitter
+  // buffer.
+  size_t concatenated_length =
+      intermediate1.size() + intermediate2.size() + intermediate3.size();
+  rtc::Buffer concatenated_buffer;
+  concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size());
+  concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size());
+  concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size());
+  EncodedImage concatenated;
+  concatenated.SetEncodedData(EncodedImageBuffer::Create(
+      concatenated_buffer.data(), concatenated_length));
+  concatenated.SetSpatialIndex(2);
+  concatenated.SetSpatialLayerFrameSize(0, intermediate1.size());
+  concatenated.SetSpatialLayerFrameSize(1, intermediate2.size());
+  concatenated.SetSpatialLayerFrameSize(2, intermediate3.size());
+
+  // Extract frame id from concatenated image
+  EncodedImageExtractionResult out = injector.ExtractData(concatenated);
+
+  EXPECT_EQ(out.id, 512);
+  EXPECT_TRUE(out.discard);
+  EXPECT_EQ(out.image.size(), 0ul);
+  EXPECT_EQ(out.image.SpatialIndex().value_or(0), 2);
+  for (int i = 0; i < 3; ++i) {
+    EXPECT_EQ(out.image.SpatialLayerFrameSize(i).value_or(0), 0ul);
+  }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, InjectOnceExtractTwice) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/2);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImageExtractionResult out = injector.ExtractData(
+ injector.InjectData(/*id=*/512, /*discard=*/false, source));
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+ out = injector.ExtractData(
+ injector.InjectData(/*id=*/512, /*discard=*/false, source));
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, Add1stReceiverAfterStart) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/0);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+ EncodedImage modified_image = injector.InjectData(
+ /*id=*/512, /*discard=*/false, source);
+
+ injector.AddParticipantInCall();
+ EncodedImageExtractionResult out = injector.ExtractData(modified_image);
+
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest, Add3rdReceiverAfterStart) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/2);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+ EncodedImage modified_image = injector.InjectData(
+ /*id=*/512, /*discard=*/false, source);
+ injector.ExtractData(modified_image);
+
+ injector.AddParticipantInCall();
+ injector.ExtractData(modified_image);
+ EncodedImageExtractionResult out = injector.ExtractData(modified_image);
+
+ EXPECT_EQ(out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTest,
+ RemoveReceiverRemovesOnlyFullyReceivedFrames) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/3);
+
+ EncodedImage source1 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source1.SetTimestamp(10);
+ EncodedImage source2 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source2.SetTimestamp(20);
+
+ EncodedImage modified_image1 = injector.InjectData(
+ /*id=*/512, /*discard=*/false, source1);
+ EncodedImage modified_image2 = injector.InjectData(
+ /*id=*/513, /*discard=*/false, source2);
+
+ // Out of 3 receivers 1st image received by 2 and 2nd image by 1
+ injector.ExtractData(DeepCopyEncodedImage(modified_image1));
+ injector.ExtractData(DeepCopyEncodedImage(modified_image1));
+ injector.ExtractData(DeepCopyEncodedImage(modified_image2));
+
+ // When we removed one receiver, 2nd image should still be available for
+ // extraction.
+ injector.RemoveParticipantInCall();
+
+ EncodedImageExtractionResult out =
+ injector.ExtractData(DeepCopyEncodedImage(modified_image2));
+
+ EXPECT_EQ(out.id, 513);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ EXPECT_EQ(out.image.SpatialLayerFrameSize(0).value_or(0), 0ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(out.image.data()[i], i + 1);
+ }
+}
+
+// Death tests.
+// Disabled on Android because death tests misbehave on Android, see
+// base/test/gtest_util.h.
+#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+TEST(SingleProcessEncodedImageDataInjectorTestDeathTest,
+ InjectOnceExtractMoreThenExpected) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/2);
+
+ EncodedImage source =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source.SetTimestamp(123456789);
+
+ EncodedImage modified =
+ injector.InjectData(/*id=*/512, /*discard=*/false, source);
+
+ injector.ExtractData(DeepCopyEncodedImage(modified));
+ injector.ExtractData(DeepCopyEncodedImage(modified));
+ EXPECT_DEATH(injector.ExtractData(DeepCopyEncodedImage(modified)),
+ "Unknown sub_id=0 for frame_id=512");
+}
+
+TEST(SingleProcessEncodedImageDataInjectorTestDeathTest,
+ RemoveReceiverRemovesOnlyFullyReceivedFramesVerifyFrameIsRemoved) {
+ SingleProcessEncodedImageDataInjector injector;
+ injector.Start(/*expected_receivers_count=*/3);
+
+ EncodedImage source1 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source1.SetTimestamp(10);
+ EncodedImage source2 =
+ CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1);
+ source2.SetTimestamp(20);
+
+ EncodedImage modified_image1 = injector.InjectData(
+ /*id=*/512, /*discard=*/false, source1);
+ EncodedImage modified_image2 = injector.InjectData(
+ /*id=*/513, /*discard=*/false, source2);
+
+ // Out of 3 receivers 1st image received by 2 and 2nd image by 1
+ injector.ExtractData(DeepCopyEncodedImage(modified_image1));
+ injector.ExtractData(DeepCopyEncodedImage(modified_image1));
+ injector.ExtractData(DeepCopyEncodedImage(modified_image2));
+
+ // When we removed one receiver 1st image should be removed.
+ injector.RemoveParticipantInCall();
+
+ EXPECT_DEATH(injector.ExtractData(DeepCopyEncodedImage(modified_image1)),
+ "Unknown sub_id=0 for frame_id=512");
+}
+#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.cc
new file mode 100644
index 0000000000..4fec0a8f9e
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/video_dumping.h"
+
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/clock.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// Writes the id of every incoming frame as one decimal line ("<id>\n") into a
+// text file. Used to dump frame ids alongside a video dump so frames can be
+// matched back to their ids offline. CHECK-fails if the file cannot be opened
+// or if WriteFrame() is called after Close().
+class VideoFrameIdsWriter final : public test::VideoFrameWriter {
+ public:
+  explicit VideoFrameIdsWriter(absl::string_view file_name)
+      : file_name_(file_name) {
+    output_file_ = fopen(file_name_.c_str(), "wb");
+    RTC_LOG(LS_INFO) << "Writing VideoFrame IDs into " << file_name_;
+    RTC_CHECK(output_file_ != nullptr)
+        << "Failed to open file to dump frame ids for writing: " << file_name_;
+  }
+  ~VideoFrameIdsWriter() override { Close(); }
+
+  bool WriteFrame(const VideoFrame& frame) override {
+    RTC_CHECK(output_file_ != nullptr) << "Writer is already closed";
+    // A valid record is at least one digit plus the trailing '\n', so fewer
+    // than 2 characters written means the write failed.
+    int chars_written = fprintf(output_file_, "%d\n", frame.id());
+    if (chars_written < 2) {
+      RTC_LOG(LS_ERROR) << "Failed to write frame id to the output file: "
+                        << file_name_;
+      return false;
+    }
+    return true;
+  }
+
+  // Idempotent: closing an already-closed writer is a no-op.
+  void Close() override {
+    if (output_file_ != nullptr) {
+      RTC_LOG(LS_INFO) << "Closing file for VideoFrame IDs: " << file_name_;
+      fclose(output_file_);
+      output_file_ = nullptr;
+    }
+  }
+
+ private:
+  const std::string file_name_;
+  FILE* output_file_;
+};
+
+// Broadcasts each received frame to multiple underlying frame writers and
+// owns all of them. WriteFrame() stops at (and reports) the first delegate
+// that fails; delegates after it are not called for that frame.
+class BroadcastingFrameWriter final : public test::VideoFrameWriter {
+ public:
+  explicit BroadcastingFrameWriter(
+      std::vector<std::unique_ptr<test::VideoFrameWriter>> delegates)
+      : delegates_(std::move(delegates)) {}
+  ~BroadcastingFrameWriter() override { Close(); }
+
+  bool WriteFrame(const webrtc::VideoFrame& frame) override {
+    for (auto& delegate : delegates_) {
+      if (!delegate->WriteFrame(frame)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  // Closes every delegate, regardless of earlier write failures.
+  void Close() override {
+    for (auto& delegate : delegates_) {
+      delegate->Close();
+    }
+  }
+
+ private:
+  std::vector<std::unique_ptr<test::VideoFrameWriter>> delegates_;
+};
+
+} // namespace
+
+VideoWriter::VideoWriter(test::VideoFrameWriter* video_writer,
+                         int sampling_modulo)
+    : video_writer_(video_writer), sampling_modulo_(sampling_modulo) {}
+
+// Forwards every `sampling_modulo_`-th frame (starting with the very first
+// one) to the underlying writer; all other frames are dropped. CHECK-fails if
+// the underlying writer reports a write failure.
+void VideoWriter::OnFrame(const VideoFrame& frame) {
+  if (frames_counter_++ % sampling_modulo_ != 0) {
+    return;
+  }
+  bool result = video_writer_->WriteFrame(frame);
+  RTC_CHECK(result) << "Failed to write frame";
+}
+
+// Combines the provided video writer with a VideoFrameIdsWriter that logs
+// frame ids into `frame_ids_dump_file_name`, returning a broadcasting writer
+// that feeds every frame to both.
+std::unique_ptr<test::VideoFrameWriter> CreateVideoFrameWithIdsWriter(
+    std::unique_ptr<test::VideoFrameWriter> video_writer_delegate,
+    absl::string_view frame_ids_dump_file_name) {
+  std::vector<std::unique_ptr<test::VideoFrameWriter>> requested_writers;
+  requested_writers.push_back(std::move(video_writer_delegate));
+  requested_writers.push_back(
+      std::make_unique<VideoFrameIdsWriter>(frame_ids_dump_file_name));
+  return std::make_unique<BroadcastingFrameWriter>(
+      std::move(requested_writers));
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.h
new file mode 100644
index 0000000000..cad4e1bdbf
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_DUMPING_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_DUMPING_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// `VideoSinkInterface` to dump incoming video frames into specified video
+// writer. Only every `sampling_modulo`-th frame, starting from the first one
+// received, is written; the rest are dropped.
+class VideoWriter final : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  // Creates video writer. Caller keeps ownership of `video_writer` and is
+  // responsible for closing it after VideoWriter will be destroyed.
+  VideoWriter(test::VideoFrameWriter* video_writer, int sampling_modulo);
+  VideoWriter(const VideoWriter&) = delete;
+  VideoWriter& operator=(const VideoWriter&) = delete;
+  ~VideoWriter() override = default;
+
+  void OnFrame(const VideoFrame& frame) override;
+
+ private:
+  test::VideoFrameWriter* const video_writer_;
+  const int sampling_modulo_;
+
+  // Number of frames received so far; drives the sampling decision.
+  int64_t frames_counter_ = 0;
+};
+
+// Creates a `VideoFrameWriter` to dump video frames together with their ids.
+// It uses provided `video_writer_delegate` to write video itself. Frame ids
+// will be logged into the specified file.
+std::unique_ptr<test::VideoFrameWriter> CreateVideoFrameWithIdsWriter(
+ std::unique_ptr<test::VideoFrameWriter> video_writer_delegate,
+ absl::string_view frame_ids_dump_file_name);
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_DUMPING_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc
new file mode 100644
index 0000000000..5dd4021516
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_dumping_test.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/analyzer/video/video_dumping.h"
+
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/random.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::Test;
+
+uint8_t RandByte(Random& random) {
+ return random.Rand(255);
+}
+
+// Builds a 2x2 I420 frame with the given `id` and randomly generated plane
+// contents: 4 random bytes fill the Y plane (two rows of two), and one random
+// byte each fills the 1x1 U and V planes.
+VideoFrame CreateRandom2x2VideoFrame(uint16_t id, Random& random) {
+  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(2, 2);
+
+  uint8_t data[6] = {RandByte(random), RandByte(random), RandByte(random),
+                     RandByte(random), RandByte(random), RandByte(random)};
+
+  memcpy(buffer->MutableDataY(), data, 2);
+  memcpy(buffer->MutableDataY() + buffer->StrideY(), data + 2, 2);
+  memcpy(buffer->MutableDataU(), data + 4, 1);
+  memcpy(buffer->MutableDataV(), data + 5, 1);
+
+  return VideoFrame::Builder()
+      .set_id(id)
+      .set_video_frame_buffer(buffer)
+      .set_timestamp_us(1)
+      .build();
+}
+
+std::vector<uint8_t> AsVector(const uint8_t* data, size_t size) {
+ std::vector<uint8_t> out;
+ out.assign(data, data + size);
+ return out;
+}
+
+void AssertFramesEqual(rtc::scoped_refptr<webrtc::I420BufferInterface> actual,
+ rtc::scoped_refptr<VideoFrameBuffer> expected) {
+ ASSERT_THAT(actual->width(), Eq(expected->width()));
+ ASSERT_THAT(actual->height(), Eq(expected->height()));
+ rtc::scoped_refptr<webrtc::I420BufferInterface> expected_i420 =
+ expected->ToI420();
+
+ int height = actual->height();
+
+ EXPECT_THAT(AsVector(actual->DataY(), actual->StrideY() * height),
+ ElementsAreArray(expected_i420->DataY(),
+ expected_i420->StrideY() * height));
+ EXPECT_THAT(AsVector(actual->DataU(), actual->StrideU() * (height + 1) / 2),
+ ElementsAreArray(expected_i420->DataU(),
+ expected_i420->StrideU() * (height + 1) / 2));
+ EXPECT_THAT(AsVector(actual->DataV(), actual->StrideV() * (height + 1) / 2),
+ ElementsAreArray(expected_i420->DataV(),
+ expected_i420->StrideV() * (height + 1) / 2));
+}
+
+// Reads `filename` line by line and checks that the newline-trimmed lines
+// equal `expected_ids`, in order. The 8-byte buffer limits each line to at
+// most 7 characters, which covers any uint16_t id plus the trailing '\n'.
+void AssertFrameIdsAre(const std::string& filename,
+                       std::vector<std::string> expected_ids) {
+  FILE* file = fopen(filename.c_str(), "r");
+  ASSERT_TRUE(file != nullptr);
+  std::vector<std::string> actual_ids;
+  char buffer[8];
+  while (fgets(buffer, sizeof buffer, file) != nullptr) {
+    std::string current_id(buffer);
+    ASSERT_GE(current_id.size(), 2lu);
+    // Trim "\n" at the end.
+    actual_ids.push_back(current_id.substr(0, current_id.size() - 1));
+  }
+  EXPECT_THAT(actual_ids, ElementsAreArray(expected_ids));
+}
+
+class VideoDumpingTest : public Test {
+ protected:
+ ~VideoDumpingTest() override = default;
+
+ void SetUp() override {
+ video_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "video_dumping_test");
+ ids_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "video_dumping_test");
+ }
+
+ void TearDown() override {
+ remove(video_filename_.c_str());
+ remove(ids_filename_.c_str());
+ }
+
+ std::string video_filename_;
+ std::string ids_filename_;
+};
+
+using CreateVideoFrameWithIdsWriterTest = VideoDumpingTest;
+
+TEST_F(CreateVideoFrameWithIdsWriterTest, VideoIsWritenWithFrameIds) {
+ Random random(/*seed=*/100);
+ VideoFrame frame1 = CreateRandom2x2VideoFrame(1, random);
+ VideoFrame frame2 = CreateRandom2x2VideoFrame(2, random);
+
+ std::unique_ptr<test::VideoFrameWriter> writer =
+ CreateVideoFrameWithIdsWriter(
+ std::make_unique<test::Y4mVideoFrameWriterImpl>(
+ std::string(video_filename_),
+ /*width=*/2, /*height=*/2, /*fps=*/2),
+ ids_filename_);
+
+ ASSERT_TRUE(writer->WriteFrame(frame1));
+ ASSERT_TRUE(writer->WriteFrame(frame2));
+ writer->Close();
+
+ auto frame_reader = test::CreateY4mFrameReader(video_filename_);
+ EXPECT_THAT(frame_reader->num_frames(), Eq(2));
+ AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer());
+ AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer());
+ AssertFrameIdsAre(ids_filename_, {"1", "2"});
+}
+
+using VideoWriterTest = VideoDumpingTest;
+
+TEST_F(VideoWriterTest, AllFramesAreWrittenWithSamplingModulo1) {
+ Random random(/*seed=*/100);
+ VideoFrame frame1 = CreateRandom2x2VideoFrame(1, random);
+ VideoFrame frame2 = CreateRandom2x2VideoFrame(2, random);
+
+ {
+ test::Y4mVideoFrameWriterImpl frame_writer(std::string(video_filename_),
+ /*width=*/2, /*height=*/2,
+ /*fps=*/2);
+ VideoWriter writer(&frame_writer, /*sampling_modulo=*/1);
+
+ writer.OnFrame(frame1);
+ writer.OnFrame(frame2);
+ frame_writer.Close();
+ }
+
+ auto frame_reader = test::CreateY4mFrameReader(video_filename_);
+ EXPECT_THAT(frame_reader->num_frames(), Eq(2));
+ AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer());
+ AssertFramesEqual(frame_reader->PullFrame(), frame2.video_frame_buffer());
+}
+
+TEST_F(VideoWriterTest, OnlyEvery2ndFramesIsWrittenWithSamplingModulo2) {
+ Random random(/*seed=*/100);
+ VideoFrame frame1 = CreateRandom2x2VideoFrame(1, random);
+ VideoFrame frame2 = CreateRandom2x2VideoFrame(2, random);
+ VideoFrame frame3 = CreateRandom2x2VideoFrame(3, random);
+
+ {
+ test::Y4mVideoFrameWriterImpl frame_writer(std::string(video_filename_),
+ /*width=*/2, /*height=*/2,
+ /*fps=*/2);
+ VideoWriter writer(&frame_writer, /*sampling_modulo=*/2);
+
+ writer.OnFrame(frame1);
+ writer.OnFrame(frame2);
+ writer.OnFrame(frame3);
+ frame_writer.Close();
+ }
+
+ auto frame_reader = test::CreateY4mFrameReader(video_filename_);
+ EXPECT_THAT(frame_reader->num_frames(), Eq(2));
+ AssertFramesEqual(frame_reader->PullFrame(), frame1.video_frame_buffer());
+ AssertFramesEqual(frame_reader->PullFrame(), frame3.video_frame_buffer());
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.cc
new file mode 100644
index 0000000000..5a74d60250
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h"
+
+#include "absl/memory/memory.h"
+#include "api/video/encoded_image.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Stores `id` in the EncodedImage's video_frame_tracking_id field. Discarding
+// frames is not supported by this injector, so `unused_discard` must be
+// false (enforced with a CHECK).
+EncodedImage VideoFrameTrackingIdInjector::InjectData(
+    uint16_t id,
+    bool unused_discard,
+    const EncodedImage& source) {
+  RTC_CHECK(!unused_discard);
+  EncodedImage out = source;
+  out.SetVideoFrameTrackingId(id);
+  return out;
+}
+
+// Reads the id back from the video_frame_tracking_id field. The payload is
+// returned unchanged and frames are never marked for discarding.
+EncodedImageExtractionResult VideoFrameTrackingIdInjector::ExtractData(
+    const EncodedImage& source) {
+  return EncodedImageExtractionResult{source.VideoFrameTrackingId(), source,
+                                      /*discard=*/false};
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h
new file mode 100644
index 0000000000..ecc3cd3f51
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_FRAME_TRACKING_ID_INJECTOR_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_FRAME_TRACKING_ID_INJECTOR_H_
+
+#include <cstdint>
+
+#include "api/video/encoded_image.h"
+#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// This injector sets and retrieves the provided id in the EncodedImage
+// video_frame_tracking_id field. This is only possible with the RTP header
+// extension VideoFrameTrackingIdExtension that will propagate the input
+// tracking id to the received EncodedImage. This RTP header extension is
+// enabled with the field trial WebRTC-VideoFrameTrackingIdAdvertised
+// (http://www.webrtc.org/experiments/rtp-hdrext/video-frame-tracking-id).
+//
+// Note that this injector doesn't allow discarding frames.
+class VideoFrameTrackingIdInjector : public EncodedImageDataPropagator {
+ public:
+ EncodedImage InjectData(uint16_t id,
+ bool unused_discard,
+ const EncodedImage& source) override;
+
+ EncodedImageExtractionResult ExtractData(const EncodedImage& source) override;
+
+ void Start(int) override {}
+ void AddParticipantInCall() override {}
+ void RemoveParticipantInCall() override {}
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_FRAME_TRACKING_ID_INJECTOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc
new file mode 100644
index 0000000000..c7d453c4bb
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h"
+
+#include "api/video/encoded_image.h"
+#include "rtc_base/buffer.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+EncodedImage CreateEncodedImageOfSizeN(size_t n) {
+ EncodedImage image;
+ rtc::scoped_refptr<EncodedImageBuffer> buffer = EncodedImageBuffer::Create(n);
+ for (size_t i = 0; i < n; ++i) {
+ buffer->data()[i] = static_cast<uint8_t>(i);
+ }
+ image.SetEncodedData(buffer);
+ return image;
+}
+
+TEST(VideoFrameTrackingIdInjectorTest, InjectExtractDiscardFalse) {
+ VideoFrameTrackingIdInjector injector;
+ EncodedImage source = CreateEncodedImageOfSizeN(10);
+ EncodedImageExtractionResult out =
+ injector.ExtractData(injector.InjectData(512, false, source));
+
+ ASSERT_TRUE(out.id.has_value());
+ EXPECT_EQ(*out.id, 512);
+ EXPECT_FALSE(out.discard);
+ EXPECT_EQ(out.image.size(), 10ul);
+ for (int i = 0; i < 10; ++i) {
+ EXPECT_EQ(source.data()[i], out.image.data()[i]);
+ }
+}
+
+#if GTEST_HAS_DEATH_TEST
+TEST(VideoFrameTrackingIdInjectorTest, InjectExtractDiscardTrue) {
+ VideoFrameTrackingIdInjector injector;
+ EncodedImage source = CreateEncodedImageOfSizeN(10);
+
+ EXPECT_DEATH(injector.InjectData(512, true, source), "");
+}
+#endif // GTEST_HAS_DEATH_TEST
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
new file mode 100644
index 0000000000..87c11886cc
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
+
+#include <stdio.h>
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h"
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h"
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
+#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"
+#include "test/pc/e2e/analyzer/video/video_dumping.h"
+#include "test/testsupport/fixed_fps_video_frame_writer_adapter.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using webrtc::webrtc_pc_e2e::VideoConfig;
+using EmulatedSFUConfigMap =
+ ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap;
+
+class AnalyzingFramePreprocessor
+ : public test::TestVideoCapturer::FramePreprocessor {
+ public:
+ AnalyzingFramePreprocessor(
+ absl::string_view peer_name,
+ absl::string_view stream_label,
+ VideoQualityAnalyzerInterface* analyzer,
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks)
+ : peer_name_(peer_name),
+ stream_label_(stream_label),
+ analyzer_(analyzer),
+ sinks_(std::move(sinks)) {}
+ ~AnalyzingFramePreprocessor() override = default;
+
+ VideoFrame Preprocess(const VideoFrame& source_frame) override {
+ // Copy VideoFrame to be able to set id on it.
+ VideoFrame frame = source_frame;
+ uint16_t frame_id =
+ analyzer_->OnFrameCaptured(peer_name_, stream_label_, frame);
+ frame.set_id(frame_id);
+
+ for (auto& sink : sinks_) {
+ sink->OnFrame(frame);
+ }
+ return frame;
+ }
+
+ private:
+ const std::string peer_name_;
+ const std::string stream_label_;
+ VideoQualityAnalyzerInterface* const analyzer_;
+ const std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
+ sinks_;
+};
+
+} // namespace
+
+VideoQualityAnalyzerInjectionHelper::VideoQualityAnalyzerInjectionHelper(
+ Clock* clock,
+ std::unique_ptr<VideoQualityAnalyzerInterface> analyzer,
+ EncodedImageDataInjector* injector,
+ EncodedImageDataExtractor* extractor)
+ : clock_(clock),
+ analyzer_(std::move(analyzer)),
+ injector_(injector),
+ extractor_(extractor) {
+ RTC_DCHECK(clock_);
+ RTC_DCHECK(injector_);
+ RTC_DCHECK(extractor_);
+}
+VideoQualityAnalyzerInjectionHelper::~VideoQualityAnalyzerInjectionHelper() =
+ default;
+
+std::unique_ptr<VideoEncoderFactory>
+VideoQualityAnalyzerInjectionHelper::WrapVideoEncoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoEncoderFactory> delegate,
+ double bitrate_multiplier,
+ EmulatedSFUConfigMap stream_to_sfu_config) const {
+ return std::make_unique<QualityAnalyzingVideoEncoderFactory>(
+ peer_name, std::move(delegate), bitrate_multiplier,
+ std::move(stream_to_sfu_config), injector_, analyzer_.get());
+}
+
+std::unique_ptr<VideoDecoderFactory>
+VideoQualityAnalyzerInjectionHelper::WrapVideoDecoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoDecoderFactory> delegate) const {
+ return std::make_unique<QualityAnalyzingVideoDecoderFactory>(
+ peer_name, std::move(delegate), extractor_, analyzer_.get());
+}
+
+std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
+VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor(
+ absl::string_view peer_name,
+ const VideoConfig& config) {
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
+ if (config.input_dump_options.has_value()) {
+ std::unique_ptr<test::VideoFrameWriter> writer =
+ config.input_dump_options->CreateInputDumpVideoFrameWriter(
+ *config.stream_label, config.GetResolution());
+ sinks.push_back(std::make_unique<VideoWriter>(
+ writer.get(), config.input_dump_options->sampling_modulo()));
+ video_writers_.push_back(std::move(writer));
+ }
+ if (config.show_on_screen) {
+ sinks.push_back(absl::WrapUnique(
+ test::VideoRenderer::Create((*config.stream_label + "-capture").c_str(),
+ config.width, config.height)));
+ }
+ sinks_helper_.AddConfig(peer_name, config);
+ {
+ MutexLock lock(&mutex_);
+ known_video_configs_.insert({*config.stream_label, config});
+ }
+ return std::make_unique<AnalyzingFramePreprocessor>(
+ peer_name, std::move(*config.stream_label), analyzer_.get(),
+ std::move(sinks));
+}
+
+std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>
+VideoQualityAnalyzerInjectionHelper::CreateVideoSink(
+ absl::string_view peer_name) {
+ return std::make_unique<AnalyzingVideoSink2>(peer_name, this);
+}
+
+std::unique_ptr<AnalyzingVideoSink>
+VideoQualityAnalyzerInjectionHelper::CreateVideoSink(
+ absl::string_view peer_name,
+ const VideoSubscription& subscription,
+ bool report_infra_metrics) {
+ return std::make_unique<AnalyzingVideoSink>(peer_name, clock_, *analyzer_,
+ sinks_helper_, subscription,
+ report_infra_metrics);
+}
+
+void VideoQualityAnalyzerInjectionHelper::Start(
+ std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count) {
+ analyzer_->Start(std::move(test_case_name), peer_names, max_threads_count);
+ extractor_->Start(peer_names.size());
+ MutexLock lock(&mutex_);
+ peers_count_ = peer_names.size();
+}
+
+void VideoQualityAnalyzerInjectionHelper::RegisterParticipantInCall(
+ absl::string_view peer_name) {
+ analyzer_->RegisterParticipantInCall(peer_name);
+ extractor_->AddParticipantInCall();
+ MutexLock lock(&mutex_);
+ peers_count_++;
+}
+
+void VideoQualityAnalyzerInjectionHelper::UnregisterParticipantInCall(
+ absl::string_view peer_name) {
+ analyzer_->UnregisterParticipantInCall(peer_name);
+ extractor_->RemoveParticipantInCall();
+ MutexLock lock(&mutex_);
+ peers_count_--;
+}
+
+void VideoQualityAnalyzerInjectionHelper::OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ analyzer_->OnStatsReports(pc_label, report);
+}
+
+void VideoQualityAnalyzerInjectionHelper::Stop() {
+ analyzer_->Stop();
+ for (const auto& video_writer : video_writers_) {
+ video_writer->Close();
+ }
+ video_writers_.clear();
+ sinks_helper_.Clear();
+}
+
+void VideoQualityAnalyzerInjectionHelper::OnFrame(absl::string_view peer_name,
+ const VideoFrame& frame) {
+ if (IsDummyFrame(frame)) {
+    // This is a dummy frame, so we don't need to process it further.
+ return;
+ }
+ // Copy entire video frame including video buffer to ensure that analyzer
+ // won't hold any WebRTC internal buffers.
+ VideoFrame frame_copy = frame;
+ frame_copy.set_video_frame_buffer(
+ I420Buffer::Copy(*frame.video_frame_buffer()->ToI420()));
+ analyzer_->OnFrameRendered(peer_name, frame_copy);
+
+ if (frame.id() != VideoFrame::kNotSetId) {
+ std::string stream_label = analyzer_->GetStreamLabel(frame.id());
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>* sinks =
+ PopulateSinks(ReceiverStream(peer_name, stream_label));
+ if (sinks == nullptr) {
+ return;
+ }
+ for (auto& sink : *sinks) {
+ sink->OnFrame(frame);
+ }
+ }
+}
+
+std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>*
+VideoQualityAnalyzerInjectionHelper::PopulateSinks(
+ const ReceiverStream& receiver_stream) {
+ MutexLock lock(&mutex_);
+ auto sinks_it = sinks_.find(receiver_stream);
+ if (sinks_it != sinks_.end()) {
+ return &sinks_it->second;
+ }
+ auto it = known_video_configs_.find(receiver_stream.stream_label);
+ RTC_DCHECK(it != known_video_configs_.end())
+ << "No video config for stream " << receiver_stream.stream_label;
+ const VideoConfig& config = it->second;
+
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>> sinks;
+ if (config.output_dump_options.has_value()) {
+ std::unique_ptr<test::VideoFrameWriter> writer =
+ config.output_dump_options->CreateOutputDumpVideoFrameWriter(
+ receiver_stream.stream_label, receiver_stream.peer_name,
+ config.GetResolution());
+ if (config.output_dump_use_fixed_framerate) {
+ writer = std::make_unique<test::FixedFpsVideoFrameWriterAdapter>(
+ config.fps, clock_, std::move(writer));
+ }
+ sinks.push_back(std::make_unique<VideoWriter>(
+ writer.get(), config.output_dump_options->sampling_modulo()));
+ video_writers_.push_back(std::move(writer));
+ }
+ if (config.show_on_screen) {
+ sinks.push_back(absl::WrapUnique(
+ test::VideoRenderer::Create((*config.stream_label + "-render").c_str(),
+ config.width, config.height)));
+ }
+ sinks_.insert({receiver_stream, std::move(sinks)});
+ return &(sinks_.find(receiver_stream)->second);
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
new file mode 100644
index 0000000000..7421c8e4a7
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_ANALYZER_INJECTION_HELPER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_ANALYZER_INJECTION_HELPER_H_
+
+#include <stdio.h>
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/stats_observer_interface.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/analyzing_video_sink.h"
+#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h"
+#include "test/pc/e2e/analyzer/video/encoded_image_data_injector.h"
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
+#include "test/test_video_capturer.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Provides factory methods for components, that will be used to inject
+// VideoQualityAnalyzerInterface into PeerConnection pipeline.
+class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface {
+ public:
+ VideoQualityAnalyzerInjectionHelper(
+ Clock* clock,
+ std::unique_ptr<VideoQualityAnalyzerInterface> analyzer,
+ EncodedImageDataInjector* injector,
+ EncodedImageDataExtractor* extractor);
+ ~VideoQualityAnalyzerInjectionHelper() override;
+
+ // Wraps video encoder factory to give video quality analyzer access to frames
+ // before encoding and encoded images after.
+ std::unique_ptr<VideoEncoderFactory> WrapVideoEncoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoEncoderFactory> delegate,
+ double bitrate_multiplier,
+ QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap stream_to_sfu_config)
+ const;
+ // Wraps video decoder factory to give video quality analyzer access to
+ // received encoded images and frames, that were decoded from them.
+ std::unique_ptr<VideoDecoderFactory> WrapVideoDecoderFactory(
+ absl::string_view peer_name,
+ std::unique_ptr<VideoDecoderFactory> delegate) const;
+
+  // Creates a VideoFrame preprocessor that will allow the video quality analyzer to
+ // get access to the captured frames. If provided config also specifies
+ // `input_dump_file_name`, video will be written into that file.
+ std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
+ CreateFramePreprocessor(absl::string_view peer_name,
+ const webrtc::webrtc_pc_e2e::VideoConfig& config);
+  // Creates a sink that will allow the video quality analyzer to get access to
+ // the rendered frames. If corresponding video track has
+ // `output_dump_file_name` in its VideoConfig, which was used for
+ // CreateFramePreprocessor(...), then video also will be written
+ // into that file.
+ // TODO(titovartem): Remove method with `peer_name` only parameter.
+ std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> CreateVideoSink(
+ absl::string_view peer_name);
+ std::unique_ptr<AnalyzingVideoSink> CreateVideoSink(
+ absl::string_view peer_name,
+ const VideoSubscription& subscription,
+ bool report_infra_metrics);
+
+ void Start(std::string test_case_name,
+ rtc::ArrayView<const std::string> peer_names,
+ int max_threads_count = 1);
+
+ // Registers new call participant to the underlying video quality analyzer.
+ // The method should be called before the participant is actually added.
+ void RegisterParticipantInCall(absl::string_view peer_name);
+
+ // Will be called after test removed existing participant in the middle of the
+ // call.
+ void UnregisterParticipantInCall(absl::string_view peer_name);
+
+ // Forwards `stats_reports` for Peer Connection `pc_label` to
+ // `analyzer_`.
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+
+ // Stops VideoQualityAnalyzerInterface to populate final data and metrics.
+ // Should be invoked after analyzed video tracks are disposed.
+ void Stop();
+
+ private:
+ // Deprecated, to be removed when old API isn't used anymore.
+ class AnalyzingVideoSink2 final : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ explicit AnalyzingVideoSink2(absl::string_view peer_name,
+ VideoQualityAnalyzerInjectionHelper* helper)
+ : peer_name_(peer_name), helper_(helper) {}
+ ~AnalyzingVideoSink2() override = default;
+
+ void OnFrame(const VideoFrame& frame) override {
+ helper_->OnFrame(peer_name_, frame);
+ }
+
+ private:
+ const std::string peer_name_;
+ VideoQualityAnalyzerInjectionHelper* const helper_;
+ };
+
+ struct ReceiverStream {
+ ReceiverStream(absl::string_view peer_name, absl::string_view stream_label)
+ : peer_name(peer_name), stream_label(stream_label) {}
+
+ std::string peer_name;
+ std::string stream_label;
+
+ // Define operators required to use ReceiverStream as std::map key.
+ bool operator==(const ReceiverStream& o) const {
+ return peer_name == o.peer_name && stream_label == o.stream_label;
+ }
+ bool operator<(const ReceiverStream& o) const {
+ return (peer_name == o.peer_name) ? stream_label < o.stream_label
+ : peer_name < o.peer_name;
+ }
+ };
+
+  // Creates a deep copy of the frame and passes it to the video analyzer,
+  // while passing the real frame to the sinks.
+ void OnFrame(absl::string_view peer_name, const VideoFrame& frame);
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>*
+ PopulateSinks(const ReceiverStream& receiver_stream);
+
+ Clock* const clock_;
+ std::unique_ptr<VideoQualityAnalyzerInterface> analyzer_;
+ EncodedImageDataInjector* injector_;
+ EncodedImageDataExtractor* extractor_;
+
+ std::vector<std::unique_ptr<test::VideoFrameWriter>> video_writers_;
+
+ AnalyzingVideoSinksHelper sinks_helper_;
+ Mutex mutex_;
+ int peers_count_ RTC_GUARDED_BY(mutex_);
+ // Map from stream label to the video config.
+ std::map<std::string, webrtc::webrtc_pc_e2e::VideoConfig> known_video_configs_
+ RTC_GUARDED_BY(mutex_);
+ std::map<ReceiverStream,
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>>
+ sinks_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_ANALYZER_INJECTION_HELPER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
new file mode 100644
index 0000000000..8049af308e
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h"
+
+#include <map>
+#include <string>
+
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/metric.h"
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+using ::webrtc::webrtc_pc_e2e::MetricMetadataKey;
+
+SamplesStatsCounter BytesPerSecondToKbps(const SamplesStatsCounter& counter) {
+ return counter * 0.008;
+}
+
+} // namespace
+
+VideoQualityMetricsReporter::VideoQualityMetricsReporter(
+ Clock* const clock,
+ test::MetricsLogger* const metrics_logger)
+ : clock_(clock), metrics_logger_(metrics_logger) {
+ RTC_CHECK(metrics_logger_);
+}
+
+void VideoQualityMetricsReporter::Start(
+ absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* /*reporter_helper*/) {
+ test_case_name_ = std::string(test_case_name);
+ start_time_ = Now();
+}
+
+void VideoQualityMetricsReporter::OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ RTC_CHECK(start_time_)
+ << "Please invoke Start(...) method before calling OnStatsReports(...)";
+
+ auto transport_stats = report->GetStatsOfType<RTCTransportStats>();
+ if (transport_stats.size() == 0u ||
+ !transport_stats[0]->selected_candidate_pair_id.is_defined()) {
+ return;
+ }
+ RTC_DCHECK_EQ(transport_stats.size(), 1);
+ std::string selected_ice_id =
+ transport_stats[0]->selected_candidate_pair_id.ValueToString();
+ // Use the selected ICE candidate pair ID to get the appropriate ICE stats.
+ const RTCIceCandidatePairStats ice_candidate_pair_stats =
+ report->Get(selected_ice_id)->cast_to<const RTCIceCandidatePairStats>();
+
+ auto outbound_rtp_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+ StatsSample sample;
+ for (auto& s : outbound_rtp_stats) {
+ if (!s->kind.is_defined()) {
+ continue;
+ }
+ if (!(*s->kind == RTCMediaStreamTrackKind::kVideo)) {
+ continue;
+ }
+ if (s->timestamp() > sample.sample_time) {
+ sample.sample_time = s->timestamp();
+ }
+ sample.retransmitted_bytes_sent +=
+ DataSize::Bytes(s->retransmitted_bytes_sent.ValueOrDefault(0ul));
+ sample.bytes_sent += DataSize::Bytes(s->bytes_sent.ValueOrDefault(0ul));
+ sample.header_bytes_sent +=
+ DataSize::Bytes(s->header_bytes_sent.ValueOrDefault(0ul));
+ }
+
+ MutexLock lock(&video_bwe_stats_lock_);
+ VideoBweStats& video_bwe_stats = video_bwe_stats_[std::string(pc_label)];
+ if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) {
+ video_bwe_stats.available_send_bandwidth.AddSample(
+ DataRate::BitsPerSec(
+ *ice_candidate_pair_stats.available_outgoing_bitrate)
+ .bytes_per_sec());
+ }
+
+ StatsSample prev_sample = last_stats_sample_[std::string(pc_label)];
+ if (prev_sample.sample_time.IsZero()) {
+ prev_sample.sample_time = start_time_.value();
+ }
+ last_stats_sample_[std::string(pc_label)] = sample;
+
+ TimeDelta time_between_samples = sample.sample_time - prev_sample.sample_time;
+ if (time_between_samples.IsZero()) {
+ return;
+ }
+
+ DataRate retransmission_bitrate =
+ (sample.retransmitted_bytes_sent - prev_sample.retransmitted_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.retransmission_bitrate.AddSample(
+ retransmission_bitrate.bytes_per_sec());
+ DataRate transmission_bitrate =
+ (sample.bytes_sent + sample.header_bytes_sent - prev_sample.bytes_sent -
+ prev_sample.header_bytes_sent) /
+ time_between_samples;
+ video_bwe_stats.transmission_bitrate.AddSample(
+ transmission_bitrate.bytes_per_sec());
+}
+
+void VideoQualityMetricsReporter::StopAndReportResults() {
+ MutexLock video_bwemutex_(&video_bwe_stats_lock_);
+ for (const auto& item : video_bwe_stats_) {
+ ReportVideoBweResults(item.first, item.second);
+ }
+}
+
+std::string VideoQualityMetricsReporter::GetTestCaseName(
+ const std::string& peer_name) const {
+ return test_case_name_ + "/" + peer_name;
+}
+
+void VideoQualityMetricsReporter::ReportVideoBweResults(
+ const std::string& peer_name,
+ const VideoBweStats& video_bwe_stats) {
+ std::string test_case_name = GetTestCaseName(peer_name);
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ std::map<std::string, std::string> metric_metadata{
+ {MetricMetadataKey::kPeerMetadataKey, peer_name},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+
+ metrics_logger_->LogMetric(
+ "available_send_bandwidth", test_case_name,
+ BytesPerSecondToKbps(video_bwe_stats.available_send_bandwidth),
+ Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric(
+ "transmission_bitrate", test_case_name,
+ BytesPerSecondToKbps(video_bwe_stats.transmission_bitrate),
+ Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+ metric_metadata);
+ metrics_logger_->LogMetric(
+ "retransmission_bitrate", test_case_name,
+ BytesPerSecondToKbps(video_bwe_stats.retransmission_bitrate),
+ Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+ metric_metadata);
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
new file mode 100644
index 0000000000..d3d976343b
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_
+#define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+struct VideoBweStats {
+ SamplesStatsCounter available_send_bandwidth;
+ SamplesStatsCounter transmission_bitrate;
+ SamplesStatsCounter retransmission_bitrate;
+};
+
+class VideoQualityMetricsReporter
+ : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
+ public:
+ VideoQualityMetricsReporter(Clock* const clock,
+ test::MetricsLogger* const metrics_logger);
+ ~VideoQualityMetricsReporter() override = default;
+
+ void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+ void StopAndReportResults() override;
+
+ private:
+ struct StatsSample {
+ DataSize bytes_sent = DataSize::Zero();
+ DataSize header_bytes_sent = DataSize::Zero();
+ DataSize retransmitted_bytes_sent = DataSize::Zero();
+
+ Timestamp sample_time = Timestamp::Zero();
+ };
+
+ std::string GetTestCaseName(const std::string& peer_name) const;
+ void ReportVideoBweResults(const std::string& peer_name,
+ const VideoBweStats& video_bwe_stats);
+ Timestamp Now() const { return clock_->CurrentTime(); }
+
+ Clock* const clock_;
+ test::MetricsLogger* const metrics_logger_;
+
+ std::string test_case_name_;
+ absl::optional<Timestamp> start_time_;
+
+ Mutex video_bwe_stats_lock_;
+ // Map between a peer connection label (provided by the framework) and
+ // its video BWE stats.
+ std::map<std::string, VideoBweStats> video_bwe_stats_
+ RTC_GUARDED_BY(video_bwe_stats_lock_);
+ std::map<std::string, StatsSample> last_stats_sample_
+ RTC_GUARDED_BY(video_bwe_stats_lock_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer_helper.cc b/third_party/libwebrtc/test/pc/e2e/analyzer_helper.cc
new file mode 100644
index 0000000000..76cd9a7c78
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer_helper.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/analyzer_helper.h"
+
+#include <string>
+#include <utility>
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+AnalyzerHelper::AnalyzerHelper() {
+ signaling_sequence_checker_.Detach();
+}
+
+void AnalyzerHelper::AddTrackToStreamMapping(
+ absl::string_view track_id,
+ absl::string_view receiver_peer,
+ absl::string_view stream_label,
+ absl::optional<std::string> sync_group) {
+ RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
+ track_to_stream_map_.insert(
+ {std::string(track_id),
+ StreamInfo{.receiver_peer = std::string(receiver_peer),
+ .stream_label = std::string(stream_label),
+ .sync_group = sync_group.has_value()
+ ? *sync_group
+ : std::string(stream_label)}});
+}
+
+void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id,
+ std::string stream_label) {
+ RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
+ track_to_stream_map_.insert(
+ {std::move(track_id), StreamInfo{stream_label, stream_label}});
+}
+
+void AnalyzerHelper::AddTrackToStreamMapping(std::string track_id,
+ std::string stream_label,
+ std::string sync_group) {
+ RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
+ track_to_stream_map_.insert(
+ {std::move(track_id),
+ StreamInfo{std::move(stream_label), std::move(sync_group)}});
+}
+
+AnalyzerHelper::StreamInfo AnalyzerHelper::GetStreamInfoFromTrackId(
+ absl::string_view track_id) const {
+ RTC_DCHECK_RUN_ON(&signaling_sequence_checker_);
+ auto track_to_stream_pair = track_to_stream_map_.find(std::string(track_id));
+ RTC_CHECK(track_to_stream_pair != track_to_stream_map_.end());
+ return track_to_stream_pair->second;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer_helper.h b/third_party/libwebrtc/test/pc/e2e/analyzer_helper.h
new file mode 100644
index 0000000000..d0b47c4fb9
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/analyzer_helper.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ANALYZER_HELPER_H_
+#define TEST_PC_E2E_ANALYZER_HELPER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "rtc_base/thread_annotations.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// This class is a utility that provides bookkeeping capabilities that
+// are useful to associate stats reports track_ids to the remote stream info.
+// The framework will populate an instance of this class and it will pass
+// it to the Start method of Media Quality Analyzers.
+// An instance of AnalyzerHelper must only be accessed from a single
+// thread and since stats collection happens on the signaling thread,
+// AddTrackToStreamMapping, GetStreamLabelFromTrackId and
+// GetSyncGroupLabelFromTrackId must be invoked from the signaling thread. Get
+// methods should be invoked only after all data is added. Mixing Get methods
+// with adding new data may lead to undefined behavior.
+class AnalyzerHelper : public TrackIdStreamInfoMap {
+ public:
+ AnalyzerHelper();
+
+ void AddTrackToStreamMapping(absl::string_view track_id,
+ absl::string_view receiver_peer,
+ absl::string_view stream_label,
+ absl::optional<std::string> sync_group);
+ void AddTrackToStreamMapping(std::string track_id, std::string stream_label);
+ void AddTrackToStreamMapping(std::string track_id,
+ std::string stream_label,
+ std::string sync_group);
+
+ StreamInfo GetStreamInfoFromTrackId(
+ absl::string_view track_id) const override;
+
+ private:
+ SequenceChecker signaling_sequence_checker_;
+ std::map<std::string, StreamInfo> track_to_stream_map_
+ RTC_GUARDED_BY(signaling_sequence_checker_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ANALYZER_HELPER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc
new file mode 100644
index 0000000000..0d4fe7478d
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.cc
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/cross_media_metrics_reporter.h"
+
+#include <utility>
+#include <vector>
+
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/metric.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+CrossMediaMetricsReporter::CrossMediaMetricsReporter(
+ test::MetricsLogger* metrics_logger)
+ : metrics_logger_(metrics_logger) {
+ RTC_CHECK(metrics_logger_);
+}
+
+void CrossMediaMetricsReporter::Start(
+ absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) {
+ test_case_name_ = std::string(test_case_name);
+ reporter_helper_ = reporter_helper;
+}
+
+void CrossMediaMetricsReporter::OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+ std::map<std::string, std::vector<const RTCInboundRTPStreamStats*>>
+ sync_group_stats;
+ for (const auto& stat : inbound_stats) {
+ if (stat->estimated_playout_timestamp.ValueOrDefault(0.) > 0 &&
+ stat->track_identifier.is_defined()) {
+ sync_group_stats[reporter_helper_
+ ->GetStreamInfoFromTrackId(*stat->track_identifier)
+ .sync_group]
+ .push_back(stat);
+ }
+ }
+
+ MutexLock lock(&mutex_);
+ for (const auto& pair : sync_group_stats) {
+    // If there are fewer than two streams, it is not a sync group.
+ if (pair.second.size() < 2) {
+ continue;
+ }
+ auto sync_group = std::string(pair.first);
+ const RTCInboundRTPStreamStats* audio_stat = pair.second[0];
+ const RTCInboundRTPStreamStats* video_stat = pair.second[1];
+
+ RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.is_defined() &&
+ video_stat->kind.is_defined() &&
+ *audio_stat->kind != *video_stat->kind)
+ << "Sync group should consist of one audio and one video stream.";
+
+ if (*audio_stat->kind == RTCMediaStreamTrackKind::kVideo) {
+ std::swap(audio_stat, video_stat);
+ }
+    // Stream labels of a sync group are the same for all polls, so we need to
+    // add them only once.
+ if (stats_info_.find(sync_group) == stats_info_.end()) {
+ RTC_CHECK(audio_stat->track_identifier.is_defined());
+ RTC_CHECK(video_stat->track_identifier.is_defined());
+ stats_info_[sync_group].audio_stream_info =
+ reporter_helper_->GetStreamInfoFromTrackId(
+ *audio_stat->track_identifier);
+ stats_info_[sync_group].video_stream_info =
+ reporter_helper_->GetStreamInfoFromTrackId(
+ *video_stat->track_identifier);
+ }
+
+ double audio_video_playout_diff = *audio_stat->estimated_playout_timestamp -
+ *video_stat->estimated_playout_timestamp;
+ if (audio_video_playout_diff > 0) {
+ stats_info_[sync_group].audio_ahead_ms.AddSample(
+ audio_video_playout_diff);
+ stats_info_[sync_group].video_ahead_ms.AddSample(0);
+ } else {
+ stats_info_[sync_group].audio_ahead_ms.AddSample(0);
+ stats_info_[sync_group].video_ahead_ms.AddSample(
+ std::abs(audio_video_playout_diff));
+ }
+ }
+}
+
+void CrossMediaMetricsReporter::StopAndReportResults() {
+ MutexLock lock(&mutex_);
+ for (const auto& pair : stats_info_) {
+ const std::string& sync_group = pair.first;
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ std::map<std::string, std::string> audio_metric_metadata{
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group},
+ {MetricMetadataKey::kAudioStreamMetadataKey,
+ pair.second.audio_stream_info.stream_label},
+ {MetricMetadataKey::kPeerMetadataKey,
+ pair.second.audio_stream_info.receiver_peer},
+ {MetricMetadataKey::kReceiverMetadataKey,
+ pair.second.audio_stream_info.receiver_peer},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+ metrics_logger_->LogMetric(
+ "audio_ahead_ms",
+ GetTestCaseName(pair.second.audio_stream_info.stream_label, sync_group),
+ pair.second.audio_ahead_ms, Unit::kMilliseconds,
+ webrtc::test::ImprovementDirection::kSmallerIsBetter,
+ std::move(audio_metric_metadata));
+
+ // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+ std::map<std::string, std::string> video_metric_metadata{
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group},
+ {MetricMetadataKey::kAudioStreamMetadataKey,
+ pair.second.video_stream_info.stream_label},
+ {MetricMetadataKey::kPeerMetadataKey,
+ pair.second.video_stream_info.receiver_peer},
+ {MetricMetadataKey::kReceiverMetadataKey,
+ pair.second.video_stream_info.receiver_peer},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+ metrics_logger_->LogMetric(
+ "video_ahead_ms",
+ GetTestCaseName(pair.second.video_stream_info.stream_label, sync_group),
+ pair.second.video_ahead_ms, Unit::kMilliseconds,
+ webrtc::test::ImprovementDirection::kSmallerIsBetter,
+ std::move(video_metric_metadata));
+ }
+}
+
+std::string CrossMediaMetricsReporter::GetTestCaseName(
+ const std::string& stream_label,
+ const std::string& sync_group) const {
+ return test_case_name_ + "/" + sync_group + "_" + stream_label;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.h b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.h
new file mode 100644
index 0000000000..2d51ebb20f
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/cross_media_metrics_reporter.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
+#define TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
+
+#include <map>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+class CrossMediaMetricsReporter
+ : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
+ public:
+ explicit CrossMediaMetricsReporter(test::MetricsLogger* metrics_logger);
+ ~CrossMediaMetricsReporter() override = default;
+
+ void Start(absl::string_view test_case_name,
+ const TrackIdStreamInfoMap* reporter_helper) override;
+ void OnStatsReports(
+ absl::string_view pc_label,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+ void StopAndReportResults() override;
+
+ private:
+ struct StatsInfo {
+ SamplesStatsCounter audio_ahead_ms;
+ SamplesStatsCounter video_ahead_ms;
+
+ TrackIdStreamInfoMap::StreamInfo audio_stream_info;
+ TrackIdStreamInfoMap::StreamInfo video_stream_info;
+ std::string audio_stream_label;
+ std::string video_stream_label;
+ };
+
+ std::string GetTestCaseName(const std::string& stream_label,
+ const std::string& sync_group) const;
+
+ test::MetricsLogger* const metrics_logger_;
+
+ std::string test_case_name_;
+ const TrackIdStreamInfoMap* reporter_helper_;
+
+ Mutex mutex_;
+ std::map<std::string, StatsInfo> stats_info_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.cc b/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.cc
new file mode 100644
index 0000000000..8fdabeb16f
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.cc
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/echo/echo_emulation.h"
+
+#include <limits>
+#include <utility>
+
+#include "api/test/pclf/media_configuration.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+constexpr int kSingleBufferDurationMs = 10;
+
+} // namespace
+
+EchoEmulatingCapturer::EchoEmulatingCapturer(
+ std::unique_ptr<TestAudioDeviceModule::Capturer> capturer,
+ EchoEmulationConfig config)
+ : delegate_(std::move(capturer)),
+ config_(config),
+ renderer_queue_(2 * config_.echo_delay.ms() / kSingleBufferDurationMs),
+ queue_input_(TestAudioDeviceModule::SamplesPerFrame(
+ delegate_->SamplingFrequency()) *
+ delegate_->NumChannels()),
+ queue_output_(TestAudioDeviceModule::SamplesPerFrame(
+ delegate_->SamplingFrequency()) *
+ delegate_->NumChannels()) {
+ renderer_thread_.Detach();
+ capturer_thread_.Detach();
+}
+
+void EchoEmulatingCapturer::OnAudioRendered(
+ rtc::ArrayView<const int16_t> data) {
+ RTC_DCHECK_RUN_ON(&renderer_thread_);
+ if (!recording_started_) {
+    // Because rendering can start before capturing, the beginning can contain
+    // a run of empty audio data frames. Skip them and start filling the queue
+    // only after the first non-empty audio data frame arrives.
+ bool is_empty = true;
+ for (auto d : data) {
+ if (d != 0) {
+ is_empty = false;
+ break;
+ }
+ }
+ if (is_empty) {
+ return;
+ }
+ recording_started_ = true;
+ }
+ queue_input_.assign(data.begin(), data.end());
+ if (!renderer_queue_.Insert(&queue_input_)) {
+ RTC_LOG(LS_WARNING) << "Echo queue is full";
+ }
+}
+
+bool EchoEmulatingCapturer::Capture(rtc::BufferT<int16_t>* buffer) {
+ RTC_DCHECK_RUN_ON(&capturer_thread_);
+ bool result = delegate_->Capture(buffer);
+ // Now we have to reduce input signal to avoid saturation when mixing in the
+ // fake echo.
+ for (size_t i = 0; i < buffer->size(); ++i) {
+ (*buffer)[i] /= 2;
+ }
+
+  // Once we have accumulated enough delay in the echo buffer we will pop from
+  // that buffer on each ::Capture(...) call. If the buffer becomes empty it
+  // indicates a bug, so we will crash when removing an item from the queue.
+ if (!delay_accumulated_) {
+ delay_accumulated_ =
+ renderer_queue_.SizeAtLeast() >=
+ static_cast<size_t>(config_.echo_delay.ms() / kSingleBufferDurationMs);
+ }
+
+ if (delay_accumulated_) {
+ RTC_CHECK(renderer_queue_.Remove(&queue_output_));
+ for (size_t i = 0; i < buffer->size() && i < queue_output_.size(); ++i) {
+ int32_t res = (*buffer)[i] + queue_output_[i];
+ if (res < std::numeric_limits<int16_t>::min()) {
+ res = std::numeric_limits<int16_t>::min();
+ }
+ if (res > std::numeric_limits<int16_t>::max()) {
+ res = std::numeric_limits<int16_t>::max();
+ }
+ (*buffer)[i] = static_cast<int16_t>(res);
+ }
+ }
+
+ return result;
+}
+
+EchoEmulatingRenderer::EchoEmulatingRenderer(
+ std::unique_ptr<TestAudioDeviceModule::Renderer> renderer,
+ EchoEmulatingCapturer* echo_emulating_capturer)
+ : delegate_(std::move(renderer)),
+ echo_emulating_capturer_(echo_emulating_capturer) {
+ RTC_DCHECK(echo_emulating_capturer_);
+}
+
+bool EchoEmulatingRenderer::Render(rtc::ArrayView<const int16_t> data) {
+ if (data.size() > 0) {
+ echo_emulating_capturer_->OnAudioRendered(data);
+ }
+ return delegate_->Render(data);
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.h b/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.h
new file mode 100644
index 0000000000..359a481e46
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/echo/echo_emulation.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_ECHO_ECHO_EMULATION_H_
+#define TEST_PC_E2E_ECHO_ECHO_EMULATION_H_
+
+#include <atomic>
+#include <deque>
+#include <memory>
+#include <vector>
+
+#include "api/test/pclf/media_configuration.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "rtc_base/swap_queue.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Reduces audio input strength from provided capturer twice and adds input
+// provided into EchoEmulatingCapturer::OnAudioRendered(...).
+class EchoEmulatingCapturer : public TestAudioDeviceModule::Capturer {
+ public:
+ EchoEmulatingCapturer(
+ std::unique_ptr<TestAudioDeviceModule::Capturer> capturer,
+ EchoEmulationConfig config);
+
+ void OnAudioRendered(rtc::ArrayView<const int16_t> data);
+
+ int SamplingFrequency() const override {
+ return delegate_->SamplingFrequency();
+ }
+ int NumChannels() const override { return delegate_->NumChannels(); }
+ bool Capture(rtc::BufferT<int16_t>* buffer) override;
+
+ private:
+ std::unique_ptr<TestAudioDeviceModule::Capturer> delegate_;
+ const EchoEmulationConfig config_;
+
+ SwapQueue<std::vector<int16_t>> renderer_queue_;
+
+ SequenceChecker renderer_thread_;
+ std::vector<int16_t> queue_input_ RTC_GUARDED_BY(renderer_thread_);
+ bool recording_started_ RTC_GUARDED_BY(renderer_thread_) = false;
+
+ SequenceChecker capturer_thread_;
+ std::vector<int16_t> queue_output_ RTC_GUARDED_BY(capturer_thread_);
+ bool delay_accumulated_ RTC_GUARDED_BY(capturer_thread_) = false;
+};
+
+// Renders output into provided renderer and also copy output into provided
+// EchoEmulationCapturer.
+class EchoEmulatingRenderer : public TestAudioDeviceModule::Renderer {
+ public:
+ EchoEmulatingRenderer(
+ std::unique_ptr<TestAudioDeviceModule::Renderer> renderer,
+ EchoEmulatingCapturer* echo_emulating_capturer);
+
+ int SamplingFrequency() const override {
+ return delegate_->SamplingFrequency();
+ }
+ int NumChannels() const override { return delegate_->NumChannels(); }
+ bool Render(rtc::ArrayView<const int16_t> data) override;
+
+ private:
+ std::unique_ptr<TestAudioDeviceModule::Renderer> delegate_;
+ EchoEmulatingCapturer* echo_emulating_capturer_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_ECHO_ECHO_EMULATION_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/architecture.md b/third_party/libwebrtc/test/pc/e2e/g3doc/architecture.md
new file mode 100644
index 0000000000..1b68c6db2c
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/architecture.md
@@ -0,0 +1,209 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'titovartem' reviewed: '2021-04-12'} *-->
+
+# PeerConnection level framework fixture architecture
+
+## Overview
+
+The main implementation of
+[`webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture`][1] is
+[`webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTest`][2]. Internally it owns
+the next main pieces:
+
+* [`MediaHelper`][3] - responsible for adding audio and video tracks to the
+ peers.
+* [`VideoQualityAnalyzerInjectionHelper`][4] and
+ [`SingleProcessEncodedImageDataInjector`][5] - used to inject video quality
+ analysis and properly match captured and rendered video frames. You can read
+ more about it in
+ [DefaultVideoQualityAnalyzer](default_video_quality_analyzer.md) section.
+* [`AudioQualityAnalyzerInterface`][6] - used to measure audio quality metrics
+* [`TestActivitiesExecutor`][7] - used to support [`ExecuteAt(...)`][8] and
+ [`ExecuteEvery(...)`][9] API of `PeerConnectionE2EQualityTestFixture` to run
+ any arbitrary action during test execution timely synchronized with a test
+ call.
+* A vector of [`QualityMetricsReporter`][10] added by the
+ `PeerConnectionE2EQualityTestFixture` user.
+* Two peers: Alice and Bob represented by instances of [`TestPeer`][11]
+ object.
+
+Also it keeps a reference to [`webrtc::TimeController`][12], which is used to
+create all required threads, task queues, task queue factories and time related
+objects.
+
+## TestPeer
+
+Call participants are represented by instances of `TestPeer` object.
+[`TestPeerFactory`][13] is used to create them. `TestPeer` owns all instances
+related to the `webrtc::PeerConnection`, including required listeners and
+callbacks. Also it provides an API to do offer/answer exchange and ICE candidate
+exchange. For this purposes internally it uses an instance of
+[`webrtc::PeerConnectionWrapper`][14].
+
+The `TestPeer` also owns the `PeerConnection` worker thread. The signaling
+thread for all `PeerConnection`'s is owned by
+`PeerConnectionE2EQualityTestFixture` and shared between all participants in the
+call. The network thread is owned by the network layer (it may be either emulated
+network provided by [Network Emulation Framework][24] or network thread and
+`rtc::NetworkManager` provided by user) and provided when peer is added to the
+fixture via [`AddPeer(...)`][15] API.
+
+## GetStats API based metrics reporters
+
+`PeerConnectionE2EQualityTestFixture` gives the user ability to provide
+different `QualityMetricsReporter`s which will listen for `PeerConnection`
+[`GetStats`][16] API. Then such reporters will be able to report various metrics
+that user wants to measure.
+
+`PeerConnectionE2EQualityTestFixture` itself also uses this mechanism to
+measure:
+
+* Audio quality metrics
+* Audio/Video sync metrics (with help of [`CrossMediaMetricsReporter`][17])
+
+Also framework provides a [`StatsBasedNetworkQualityMetricsReporter`][18] to
+measure network related WebRTC metrics and print debug raw emulated network
+statistic. This reporter should be added by user via
+[`AddQualityMetricsReporter(...)`][19] API if required.
+
+Internally stats gathering is done by [`StatsPoller`][20]. Stats are requested
+once per second for each `PeerConnection` and then resulted object is provided
+into each stats listener.
+
+## Offer/Answer exchange
+
+`PeerConnectionE2EQualityTest` provides ability to test Simulcast and SVC for
+video. These features aren't supported by P2P call and in general requires a
+Selective Forwarding Unit (SFU). So special logic is applied to mimic SFU
+behavior in P2P call. This logic is located inside [`SignalingInterceptor`][21],
+[`QualityAnalyzingVideoEncoder`][22] and [`QualityAnalyzingVideoDecoder`][23]
+and consist of SDP modification during offer/answer exchange and special
+handling of video frames from unrelated Simulcast/SVC streams during decoding.
+
+### Simulcast
+
+In case of Simulcast we have a video track, which internally contains multiple
+video streams, for example low resolution, medium resolution and high
+resolution. WebRTC client doesn't support receiving an offer with multiple
+streams in it, because usually SFU will keep only single stream for the client.
+To bypass it framework will modify offer by converting a single track with three
+video streams into three independent video tracks. Then sender will think that
+it send simulcast, but receiver will think that it receives 3 independent
+tracks.
+
+To achieve such behavior some extra tweaks are required:
+
+* MID RTP header extension from original offer has to be removed
+* RID RTP header extension from original offer is replaced with MID RTP header
+ extension, so the ID that sender uses for RID on receiver will be parsed as
+ MID.
+* The answer has to be modified in the opposite way.
+
+Described modifications are illustrated on the picture below.
+
+![VP8 Simulcast offer modification](vp8_simulcast_offer_modification.png "VP8 Simulcast offer modification")
+
+The exchange will look like this:
+
+1. Alice creates an offer
+2. Alice sets offer as local description
+3. Do described offer modification
+4. Alice sends modified offer to Bob
+5. Bob sets modified offer as remote description
+6. Bob creates answer
+7. Bob sets answer as local description
+8. Do reverse modifications on answer
+9. Bob sends modified answer to Alice
+10. Alice sets modified answer as remote description
+
+Such a mechanism puts a constraint that RTX streams are not supported, because
+they don't have the RID RTP header extension in their packets.
+
+### SVC
+
+In case of SVC the framework will update the sender's offer before even setting
+it as local description on the sender side. Then no changes to answer will be
+required.
+
+`ssrc` is a 32 bit random value that is generated in RTP to denote a specific
+source used to send media in an RTP connection. In original offer video track
+section will look like this:
+
+```
+m=video 9 UDP/TLS/RTP/SAVPF 98 100 99 101
+...
+a=ssrc-group:FID <primary ssrc> <retransmission ssrc>
+a=ssrc:<primary ssrc> cname:...
+....
+a=ssrc:<retransmission ssrc> cname:...
+....
+```
+
+To enable SVC for such video track framework will add extra `ssrc`s for each SVC
+stream that is required like this:
+
+```
+a=ssrc-group:FID <Low resolution primary ssrc> <Low resolution retransmission ssrc>
+a=ssrc:<Low resolution primary ssrc> cname:...
+....
+a=ssrc:<Low resolution retransmission ssrc> cname:....
+...
+a=ssrc-group:FID <Medium resolution primary ssrc> <Medium resolution retransmission ssrc>
+a=ssrc:<Medium resolution primary ssrc> cname:...
+....
+a=ssrc:<Medium resolution retransmission ssrc> cname:....
+...
+a=ssrc-group:FID <High resolution primary ssrc> <High resolution retransmission ssrc>
+a=ssrc:<High resolution primary ssrc> cname:...
+....
+a=ssrc:<High resolution retransmission ssrc> cname:....
+...
+```
+
+The next line will also be added to the video track section of the offer:
+
+```
+a=ssrc-group:SIM <Low resolution primary ssrc> <Medium resolution primary ssrc> <High resolution primary ssrc>
+```
+
+It will tell PeerConnection that this track should be configured as SVC. It
+utilizes the WebRTC Plan B offer structure to achieve SVC behavior; it also
+modifies the offer before setting it as the local description, which violates
+Also it adds limitations that on lossy networks only top resolution streams can
+be analyzed, because WebRTC won't try to restore low resolution streams in case
+of loss, because it still receives higher stream.
+
+### Handling in encoder/decoder
+
+In the encoder, the framework for each encoded video frame will propagate
+information required for the fake SFU to know if it belongs to an interesting
+simulcast stream/spatial layer or if it should be "discarded".
+
+On the decoder side frames that should be "discarded" by fake SFU will be auto
+decoded into single pixel images and only the interesting simulcast
+stream/spatial layer will go into real decoder and then will be analyzed.
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=55;drc=484acf27231d931dbc99aedce85bc27e06486b96
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/peer_connection_quality_test.h;l=44;drc=6cc893ad778a0965e2b7a8e614f3c98aa81bee5b
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/media/media_helper.h;l=27;drc=d46db9f1523ae45909b4a6fdc90a140443068bc6
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h;l=38;drc=79020414fd5c71f9ec1f25445ea5f1c8001e1a49
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h;l=40;drc=79020414fd5c71f9ec1f25445ea5f1c8001e1a49
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/audio_quality_analyzer_interface.h;l=23;drc=20f45823e37fd7272aa841831c029c21f29742c2
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/test_activities_executor.h;l=28;drc=6cc893ad778a0965e2b7a8e614f3c98aa81bee5b
+[8]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=439;drc=484acf27231d931dbc99aedce85bc27e06486b96
+[9]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=445;drc=484acf27231d931dbc99aedce85bc27e06486b96
+[10]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=413;drc=9438fb3fff97c803d1ead34c0e4f223db168526f
+[11]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/test_activities_executor.h;l=28;drc=6cc893ad778a0965e2b7a8e614f3c98aa81bee5b
+[12]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/test_activities_executor.h;l=28;drc=6cc893ad778a0965e2b7a8e614f3c98aa81bee5b
+[13]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/test_peer_factory.h;l=46;drc=0ef4a2488a466a24ab97b31fdddde55440d451f9
+[14]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/peer_connection_wrapper.h;l=47;drc=5ab79e62f691875a237ea28ca3975ea1f0ed62ec
+[15]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=459;drc=484acf27231d931dbc99aedce85bc27e06486b96
+[16]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/peer_connection_interface.h;l=886;drc=9438fb3fff97c803d1ead34c0e4f223db168526f
+[17]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/cross_media_metrics_reporter.h;l=29;drc=9d777620236ec76754cfce19f6e82dd18e52d22c
+[18]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/cross_media_metrics_reporter.h;l=29;drc=9d777620236ec76754cfce19f6e82dd18e52d22c
+[19]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=450;drc=484acf27231d931dbc99aedce85bc27e06486b96
+[20]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/stats_poller.h;l=52;drc=9b526180c9e9722d3fc7f8689da6ec094fc7fc0a
+[21]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/sdp/sdp_changer.h;l=79;drc=ee558dcca89fd8b105114ededf9e74d948da85e8
+[22]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h;l=54;drc=79020414fd5c71f9ec1f25445ea5f1c8001e1a49
+[23]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h;l=50;drc=79020414fd5c71f9ec1f25445ea5f1c8001e1a49
+[24]: /test/network/g3doc/index.md
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/default_video_quality_analyzer.md b/third_party/libwebrtc/test/pc/e2e/g3doc/default_video_quality_analyzer.md
new file mode 100644
index 0000000000..67596777f2
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/default_video_quality_analyzer.md
@@ -0,0 +1,197 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'titovartem' reviewed: '2021-02-21'} *-->
+
+# DefaultVideoQualityAnalyzer
+
+## Audience
+
+This document is for users of
+[`webrtc::webrtc_pc_e2e::DefaultVideoQualityAnalyzer`][1].
+
+## Overview
+
+`DefaultVideoQualityAnalyzer` implements
+[`webrtc::VideoQualityAnalyzerInterface`][2] and is the main
+implementation of the video quality analyzer for WebRTC. To operate correctly
+it requires receiving the video frame at each of the following steps:
+
+1. On frame captured - the analyzer will generate a unique ID for the frame,
+   which the caller should attach to it.
+2. Immediately before the frame enters the encoder.
+3. Immediately after the frame was encoded.
+4. After the frame was received and immediately before it entered the decoder.
+5. Immediately after the frame was decoded.
+6. When the frame was rendered.
+
+![VideoQualityAnalyzerInterface pipeline](video_quality_analyzer_pipeline.png "VideoQualityAnalyzerInterface pipeline")
+
+The analyzer updates its internal metrics per frame when it was rendered and
+reports all of them after it was stopped through
+[WebRTC perf results reporting system][10].
+
+To properly inject `DefaultVideoQualityAnalyzer` into pipeline the following helpers can be used:
+
+### VideoQualityAnalyzerInjectionHelper
+
+[`webrtc::webrtc_pc_e2e::VideoQualityAnalyzerInjectionHelper`][3] provides
+factory methods for components, that will be used to inject
+`VideoQualityAnalyzerInterface` into the `PeerConnection` pipeline:
+
+* Wrappers for [`webrtc::VideoEncoderFactory`][4] and
+  [`webrtc::VideoDecoderFactory`][5] which will properly pass
+  [`webrtc::VideoFrame`][6] and [`webrtc::EncodedImage`][7] into the analyzer
+  before and after the real video encoder and decoder.
+* [`webrtc::test::TestVideoCapturer::FramePreprocessor`][8] which is used to
+ pass generated frames into analyzer on capturing and then set the returned
+  frame ID. It also configures dumping of captured frames if required.
+* [`rtc::VideoSinkInterface<VideoFrame>`][9] which is used to pass frames to
+ the analyzer before they will be rendered to compute per frame metrics. It
+  also configures dumping of rendered video if required.
+
+Besides factories `VideoQualityAnalyzerInjectionHelper` has method to
+orchestrate `VideoQualityAnalyzerInterface` workflow:
+
+* `Start` - to start video analyzer, so it will be able to receive and analyze
+ video frames.
+* `RegisterParticipantInCall` - to add new participants after analyzer was
+ started.
+* `Stop` - to stop analyzer, compute all metrics for frames that were received
+ before and report them.
+
+Also `VideoQualityAnalyzerInjectionHelper` implements
+[`webrtc::webrtc_pc_e2e::StatsObserverInterface`][11] to propagate WebRTC stats
+to `VideoQualityAnalyzerInterface`.
+
+### EncodedImageDataInjector and EncodedImageDataExtractor
+
+[`webrtc::webrtc_pc_e2e::EncodedImageDataInjector`][14] and
+[`webrtc::webrtc_pc_e2e::EncodedImageDataExtractor`][15] are used to inject and
+extract data into `webrtc::EncodedImage` to propagate frame ID and other
+required information through the network.
+
+By default [`webrtc::webrtc_pc_e2e::SingleProcessEncodedImageDataInjector`][16]
+is used. It treats the `webrtc::EncodedImage` payload as a black box which
+remains unchanged from encoder to decoder and stores the information required
+for its work in the last 3 bytes of the payload, replacing the original data
+during injection and restoring it back during extraction. Also
+`SingleProcessEncodedImageDataInjector` requires that sender and receiver are
+inside a single process.
+
+![SingleProcessEncodedImageDataInjector](single_process_encoded_image_data_injector.png "SingleProcessEncodedImageDataInjector")
+
+## Exported metrics
+
+Exported metrics are reported to WebRTC perf results reporting system.
+
+### General
+
+* *`cpu_usage`* - CPU usage excluding video analyzer
+
+### Video
+
+* *`psnr`* - peak signal-to-noise ratio:
+ [wikipedia](https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio)
+* *`ssim`* - structural similarity:
+ [wikipedia](https://en.wikipedia.org/wiki/Structural_similarity).
+* *`min_psnr`* - minimum value of psnr across all frames of video stream.
+* *`encode_time`* - time to encode a single frame.
+* *`decode_time`* - time to decode a single frame.
+* *`transport_time`* - time from frame encoded to frame received for decoding.
+* *`receive_to_render_time`* - time from frame received for decoding to frame
+ rendered.
+* *`total_delay_incl_transport`* - time from frame was captured on device to
+ time when frame was displayed on device.
+* *`encode_frame_rate`* - frame rate after encoder.
+* *`harmonic_framerate`* - video duration divided by the squared sum of
+  interframe delays. Reflects render frame rate penalized by freezes.
+* *`time_between_rendered_frames`* - time between frames out to renderer.
+* *`dropped_frames`* - amount of frames that were sent, but weren't rendered
+  and are known not to be "on the way" from sender to receiver.
+
+Freeze is a pause when no new frames from decoder arrived for 150ms + avg time
+between frames or 3 * avg time between frames.
+
+* *`time_between_freezes`* - mean time from previous freeze end to new freeze
+ start.
+* *`freeze_time_ms`* - total freeze time in ms.
+* *`max_skipped`* - frames skipped between two nearest rendered.
+* *`pixels_per_frame`* - amount of pixels on frame (width * height).
+* *`target_encode_bitrate`* - target encode bitrate provided by BWE to
+ encoder.
+* *`actual_encode_bitrate`* - actual encode bitrate produced by encoder.
+* *`available_send_bandwidth`* - available send bandwidth estimated by BWE.
+* *`transmission_bitrate`* - bitrate of media in the emulated network, not
+  counting retransmissions, FEC, and RTCP messages.
+* *`retransmission_bitrate`* - bitrate of retransmission streams only.
+
+### Framework stability
+
+* *`frames_in_flight`* - amount of frames that were captured but weren't seen
+ on receiver.
+
+## Debug metrics
+
+Debug metrics are not reported to WebRTC perf results reporting system, but are
+available through `DefaultVideoQualityAnalyzer` API.
+
+### [FrameCounters][12]
+
+Frame counters consist of the following counters:
+
+* *`captured`* - count of frames, that were passed into WebRTC pipeline by
+ video stream source
+* *`pre_encoded`* - count of frames that reached video encoder.
+* *`encoded`* - count of encoded images that were produced by encoder for all
+ requested spatial layers and simulcast streams.
+* *`received`* - count of encoded images received in decoder for all requested
+ spatial layers and simulcast streams.
+* *`decoded`* - count of frames that were produced by decoder.
+* *`rendered`* - count of frames that went out from WebRTC pipeline to video
+ sink.
+* *`dropped`* - count of frames that were dropped in any point between
+ capturing and rendering.
+
+`DefaultVideoQualityAnalyzer` exports these frame counters:
+
+* *`GlobalCounters`* - frame counters for frames met on each stage of analysis
+ for all media streams.
+* *`PerStreamCounters`* - frame counters for frames met on each stage of
+ analysis separated per individual video track (single media section in the
+ SDP offer).
+
+### [AnalyzerStats][13]
+
+Contains metrics about internal state of video analyzer during its work
+
+* *`comparisons_queue_size`* - size of analyzer internal queue used to perform
+ captured and rendered frames comparisons measured when new element is added
+ to the queue.
+* *`comparisons_done`* - number of performed comparisons of 2 video frames
+ from captured and rendered streams.
+* *`cpu_overloaded_comparisons_done`* - number of cpu overloaded comparisons.
+ Comparison is cpu overloaded if it is queued when there are too many not
+ processed comparisons in the queue. Overloaded comparison doesn't include
+ metrics like SSIM and PSNR that require heavy computations.
+* *`memory_overloaded_comparisons_done`* - number of memory overloaded
+ comparisons. Comparison is memory overloaded if it is queued when its
+ captured frame was already removed due to high memory usage for that video
+ stream.
+* *`frames_in_flight_left_count`* - count of frames in flight in analyzer
+ measured when new comparison is added and after analyzer was stopped.
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h;l=188;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[2]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/test/video_quality_analyzer_interface.h;l=56;drc=d7808f1c464a07c8f1e2f97ec7ee92fda998d590
+[3]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h;l=39;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[4]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/video_codecs/video_encoder_factory.h;l=27;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[5]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/video_codecs/video_decoder_factory.h;l=27;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[6]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/video/video_frame.h;l=30;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[7]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/video/encoded_image.h;l=71;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[8]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/test_video_capturer.h;l=28;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[9]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/video/video_sink_interface.h;l=19;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[10]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/testsupport/perf_test.h;drc=0710b401b1e5b500b8e84946fb657656ba1b58b7
+[11]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/api/test/stats_observer_interface.h;l=21;drc=9b526180c9e9722d3fc7f8689da6ec094fc7fc0a
+[12]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h;l=57;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[13]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h;l=113;drc=08f46909a8735cf181b99ef2f7e1791c5a7531d2
+[14]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h;l=23;drc=c57089a97a3df454f4356d882cc8df173e8b3ead
+[15]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/encoded_image_data_injector.h;l=46;drc=c57089a97a3df454f4356d882cc8df173e8b3ead
+[16]: https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h;l=40;drc=c57089a97a3df454f4356d882cc8df173e8b3ead
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/in_test_psnr_plot.png b/third_party/libwebrtc/test/pc/e2e/g3doc/in_test_psnr_plot.png
new file mode 100644
index 0000000000..3f36725727
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/in_test_psnr_plot.png
Binary files differ
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/index.md b/third_party/libwebrtc/test/pc/e2e/g3doc/index.md
new file mode 100644
index 0000000000..678262bb2b
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/index.md
@@ -0,0 +1,224 @@
+<!-- go/cmark -->
+<!--* freshness: {owner: 'titovartem' reviewed: '2021-04-12'} *-->
+
+# PeerConnection Level Framework
+
+## API
+
+* [Fixture][1]
+* [Fixture factory function][2]
+
+## Documentation
+
+The PeerConnection level framework is designed for end-to-end media quality
+testing through the PeerConnection level public API. The framework uses the
+*Unified plan* API to generate offers/answers during the signaling phase. The
+framework also wraps the video encoder/decoder and injects it into
+*`webrtc::PeerConnection`* to measure video quality, performing 1:1 frames
+matching between captured and rendered frames without any extra requirements to
+input video. For audio quality evaluation the standard `GetStats()` API from
+PeerConnection is used.
+
+The framework API is located in the namespace *`webrtc::webrtc_pc_e2e`*.
+
+### Supported features
+
+* Single or bidirectional media in the call
+* RTC Event log dump per peer
+* AEC dump per peer
+* Compatible with *`webrtc::TimeController`* for both real and simulated time
+* Media
+ * AV sync
+* Video
+ * Any amount of video tracks both from caller and callee sides
+ * Input video from
+ * Video generator
+ * Specified file
+ * Any instance of *`webrtc::test::FrameGeneratorInterface`*
+ * Dumping of captured/rendered video into file
+ * Screen sharing
+ * Vp8 simulcast from caller side
+ * Vp9 SVC from caller side
+ * Choosing of video codec (name and parameters), having multiple codecs
+ negotiated to support codec-switching testing.
+ * FEC (ULP or Flex)
+ * Forced codec overshooting (for encoder overshoot emulation on some
+ mobile devices, when hardware encoder can overshoot target bitrate)
+* Audio
+ * Up to 1 audio track both from caller and callee sides
+ * Generated audio
+ * Audio from specified file
+ * Dumping of captured/rendered audio into file
+ * Parameterizing of `cricket::AudioOptions`
+ * Echo emulation
+* Injection of various WebRTC components into underlying
+ *`webrtc::PeerConnection`* or *`webrtc::PeerConnectionFactory`*. You can see
+ the full list [here][11]
+* Scheduling of events, that can happen during the test, for example:
+ * Changes in network configuration
+ * User statistics measurements
+ * Custom defined actions
+* User defined statistics reporting via
+ *`webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::QualityMetricsReporter`*
+ interface
+
+## Exported metrics
+
+### General
+
+* *`<peer_name>_connected`* - peer successfully established connection to
+ remote side
+* *`cpu_usage`* - CPU usage excluding video analyzer
+* *`audio_ahead_ms`* - Used to estimate how much audio and video is out of
+ sync when the two tracks were from the same source. Stats are polled
+ periodically during a call. The metric represents how much earlier was audio
+ played out on average over the call. If, during a stats poll, video is
+ ahead, then audio_ahead_ms will be equal to 0 for this poll.
+* *`video_ahead_ms`* - Used to estimate how much audio and video is out of
+ sync when the two tracks were from the same source. Stats are polled
+ periodically during a call. The metric represents how much earlier was video
+ played out on average over the call. If, during a stats poll, audio is
+ ahead, then video_ahead_ms will be equal to 0 for this poll.
+
+### Video
+
+See documentation for
+[*`DefaultVideoQualityAnalyzer`*](default_video_quality_analyzer.md#exported-metrics)
+
+### Audio
+
+* *`accelerate_rate`* - when playout is sped up, this counter is increased by
+ the difference between the number of samples received and the number of
+ samples played out. If speedup is achieved by removing samples, this will be
+  the count of samples removed. Rate is calculated as the difference between
+  nearby samples divided by the sample interval.
+* *`expand_rate`* - the total number of samples that are concealed samples
+ over time. A concealed sample is a sample that was replaced with synthesized
+ samples generated locally before being played out. Examples of samples that
+ have to be concealed are samples from lost packets or samples from packets
+ that arrive too late to be played out
+* *`speech_expand_rate`* - the total number of samples that are concealed
+ samples minus the total number of concealed samples inserted that are
+ "silent" over time. Playing out silent samples results in silence or comfort
+ noise.
+* *`preemptive_rate`* - when playout is slowed down, this counter is increased
+ by the difference between the number of samples received and the number of
+ samples played out. If playout is slowed down by inserting samples, this
+  will be the number of inserted samples. Rate is calculated as the difference
+  between nearby samples divided by the sample interval.
+* *`average_jitter_buffer_delay_ms`* - average size of NetEQ jitter buffer.
+* *`preferred_buffer_size_ms`* - preferred size of NetEQ jitter buffer.
+* *`visqol_mos`* - proxy for audio quality itself.
+* *`asdm_samples`* - measure of how much acceleration/deceleration was in the
+ signal.
+* *`word_error_rate`* - measure of how intelligible the audio was (percent of
+ words that could not be recognized in output audio).
+
+### Network
+
+* *`bytes_sent`* - represents the total number of payload bytes sent on this
+ PeerConnection, i.e., not including headers or padding
+* *`packets_sent`* - represents the total number of packets sent over this
+ PeerConnection’s transports.
+* *`average_send_rate`* - average send rate calculated on bytes_sent divided
+ by test duration.
+* *`payload_bytes_sent`* - total number of bytes sent for all SSRC plus total
+ number of RTP header and padding bytes sent for all SSRC. This does not
+ include the size of transport layer headers such as IP or UDP.
+* *`sent_packets_loss`* - packets_sent minus corresponding packets_received.
+* *`bytes_received`* - represents the total number of bytes received on this
+ PeerConnection, i.e., not including headers or padding.
+* *`packets_received`* - represents the total number of packets received on
+ this PeerConnection’s transports.
+* *`average_receive_rate`* - average receive rate calculated on bytes_received
+ divided by test duration.
+* *`payload_bytes_received`* - total number of bytes received for all SSRC
+ plus total number of RTP header and padding bytes received for all SSRC.
+ This does not include the size of transport layer headers such as IP or UDP.
+
+### Framework stability
+
+* *`frames_in_flight`* - amount of frames that were captured but weren't seen
+  on receiver, in the sense that all subsequent frames also weren't seen on
+  receiver.
+* *`bytes_discarded_no_receiver`* - total number of bytes that were received
+ on network interfaces related to the peer, but destination port was closed.
+* *`packets_discarded_no_receiver`* - total number of packets that were
+ received on network interfaces related to the peer, but destination port was
+ closed.
+
+## Examples
+
+Examples can be found in
+
+* [peer_connection_e2e_smoke_test.cc][3]
+* [pc_full_stack_tests.cc][4]
+
+## Stats plotting
+
+### Description
+
+Stats plotting provides ability to plot statistic collected during the test.
+Right now it is used in the PeerConnection level framework and gives the
+ability to see how video quality metrics changed during test execution.
+
+### Usage
+
+To make any metrics plottable you need:
+
+1. Collect metric data with [SamplesStatsCounter][5] which internally will
+ store all intermediate points and timestamps when these points were added.
+2. Then you need to report collected data with
+   [`webrtc::test::PrintResult(...)`][6]. By using this method you will also
+ specify name of the plottable metric.
+
+After these steps it will be possible to export your metric for plotting. There
+are several options how you can do this:
+
+1. Use [`webrtc::TestMain::Create()`][7] as `main` function implementation, for
+ example use [`test/test_main.cc`][8] as `main` function for your test.
+
+ In such case your binary will have flag `--plot`, where you can provide a
+ list of metrics, that you want to plot or specify `all` to plot all
+ available metrics.
+
+ If `--plot` is specified, the binary will output metrics data into `stdout`.
+ Then you need to pipe this `stdout` into python plotter script
+ [`rtc_tools/metrics_plotter.py`][9], which will plot data.
+
+ Examples:
+
+ ```shell
+ $ ./out/Default/test_support_unittests \
+ --gtest_filter=PeerConnectionE2EQualityTestSmokeTest.Svc \
+ --nologs \
+ --plot=all \
+ | python rtc_tools/metrics_plotter.py
+ ```
+
+ ```shell
+ $ ./out/Default/test_support_unittests \
+ --gtest_filter=PeerConnectionE2EQualityTestSmokeTest.Svc \
+ --nologs \
+ --plot=psnr,ssim \
+ | python rtc_tools/metrics_plotter.py
+ ```
+
+ Example chart: ![PSNR changes during the test](in_test_psnr_plot.png)
+
+2. Use API from [`test/testsupport/perf_test.h`][10] directly by invoking
+ `webrtc::test::PrintPlottableResults(const std::vector<std::string>&
+ desired_graphs)` to print plottable metrics to stdout. Then as in previous
+ option you need to pipe result into plotter script.
+
+[1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;drc=cbe6e8a2589a925d4c91a2ac2c69201f03de9c39
+[2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/create_peerconnection_quality_test_fixture.h;drc=cbe6e8a2589a925d4c91a2ac2c69201f03de9c39
+[3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc;drc=cbe6e8a2589a925d4c91a2ac2c69201f03de9c39
+[4]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/video/pc_full_stack_tests.cc;drc=cbe6e8a2589a925d4c91a2ac2c69201f03de9c39
+[5]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/numerics/samples_stats_counter.h;drc=cbe6e8a2589a925d4c91a2ac2c69201f03de9c39
+[6]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/testsupport/perf_test.h;l=86;drc=0710b401b1e5b500b8e84946fb657656ba1b58b7
+[7]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/test_main_lib.h;l=23;drc=bcb42f1e4be136c390986a40d9d5cb3ad0de260b
+[8]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/test_main.cc;drc=bcb42f1e4be136c390986a40d9d5cb3ad0de260b
+[9]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/rtc_tools/metrics_plotter.py;drc=8cc6695652307929edfc877cd64b75cd9ec2d615
+[10]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/test/testsupport/perf_test.h;l=105;drc=0710b401b1e5b500b8e84946fb657656ba1b58b7
+[11]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/test/peerconnection_quality_test_fixture.h;l=272;drc=484acf27231d931dbc99aedce85bc27e06486b96
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/single_process_encoded_image_data_injector.png b/third_party/libwebrtc/test/pc/e2e/g3doc/single_process_encoded_image_data_injector.png
new file mode 100644
index 0000000000..73480bafbe
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/single_process_encoded_image_data_injector.png
Binary files differ
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/video_quality_analyzer_pipeline.png b/third_party/libwebrtc/test/pc/e2e/g3doc/video_quality_analyzer_pipeline.png
new file mode 100644
index 0000000000..6cddb91110
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/video_quality_analyzer_pipeline.png
Binary files differ
diff --git a/third_party/libwebrtc/test/pc/e2e/g3doc/vp8_simulcast_offer_modification.png b/third_party/libwebrtc/test/pc/e2e/g3doc/vp8_simulcast_offer_modification.png
new file mode 100644
index 0000000000..c7eaa04c0e
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/g3doc/vp8_simulcast_offer_modification.png
Binary files differ
diff --git a/third_party/libwebrtc/test/pc/e2e/media/media_helper.cc b/third_party/libwebrtc/test/pc/e2e/media/media_helper.cc
new file mode 100644
index 0000000000..e945bd4dae
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/media/media_helper.cc
@@ -0,0 +1,128 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/media/media_helper.h"
+
+#include <string>
+#include <utility>
+
+#include "absl/types/variant.h"
+#include "api/media_stream_interface.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "test/frame_generator_capturer.h"
+#include "test/platform_video_capturer.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+void MediaHelper::MaybeAddAudio(TestPeer* peer) {
+ if (!peer->params().audio_config) {
+ return;
+ }
+ const AudioConfig& audio_config = peer->params().audio_config.value();
+ rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+ peer->pc_factory()->CreateAudioSource(audio_config.audio_options);
+ rtc::scoped_refptr<AudioTrackInterface> track =
+ peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label,
+ source.get());
+ std::string sync_group = audio_config.sync_group
+ ? audio_config.sync_group.value()
+ : audio_config.stream_label.value() + "-sync";
+ peer->AddTrack(track, {sync_group, *audio_config.stream_label});
+}
+
+std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>
+MediaHelper::MaybeAddVideo(TestPeer* peer) {
+ // Params here valid because of pre-run validation.
+ const Params& params = peer->params();
+ const ConfigurableParams& configurable_params = peer->configurable_params();
+ std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>> out;
+ for (size_t i = 0; i < configurable_params.video_configs.size(); ++i) {
+ const VideoConfig& video_config = configurable_params.video_configs[i];
+ // Setup input video source into peer connection.
+ std::unique_ptr<test::TestVideoCapturer> capturer = CreateVideoCapturer(
+ video_config, peer->ReleaseVideoSource(i),
+ video_quality_analyzer_injection_helper_->CreateFramePreprocessor(
+ params.name.value(), video_config));
+ bool is_screencast =
+ video_config.content_hint == VideoTrackInterface::ContentHint::kText ||
+ video_config.content_hint ==
+ VideoTrackInterface::ContentHint::kDetailed;
+ rtc::scoped_refptr<TestVideoCapturerVideoTrackSource> source =
+ rtc::make_ref_counted<TestVideoCapturerVideoTrackSource>(
+ std::move(capturer), is_screencast);
+ out.push_back(source);
+ RTC_LOG(LS_INFO) << "Adding video with video_config.stream_label="
+ << video_config.stream_label.value();
+ rtc::scoped_refptr<VideoTrackInterface> track =
+ peer->pc_factory()->CreateVideoTrack(video_config.stream_label.value(),
+ source.get());
+ if (video_config.content_hint.has_value()) {
+ track->set_content_hint(video_config.content_hint.value());
+ }
+ std::string sync_group = video_config.sync_group
+ ? video_config.sync_group.value()
+ : video_config.stream_label.value() + "-sync";
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> sender =
+ peer->AddTrack(track, {sync_group, *video_config.stream_label});
+ RTC_CHECK(sender.ok());
+ if (video_config.temporal_layers_count ||
+ video_config.degradation_preference) {
+ RtpParameters rtp_parameters = sender.value()->GetParameters();
+ if (video_config.temporal_layers_count) {
+ for (auto& encoding_parameters : rtp_parameters.encodings) {
+ encoding_parameters.num_temporal_layers =
+ video_config.temporal_layers_count;
+ }
+ }
+ if (video_config.degradation_preference) {
+ rtp_parameters.degradation_preference =
+ video_config.degradation_preference;
+ }
+ RTCError res = sender.value()->SetParameters(rtp_parameters);
+ RTC_CHECK(res.ok()) << "Failed to set RTP parameters";
+ }
+ }
+ return out;
+}
+
+std::unique_ptr<test::TestVideoCapturer> MediaHelper::CreateVideoCapturer(
+ const VideoConfig& video_config,
+ PeerConfigurer::VideoSource source,
+ std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
+ frame_preprocessor) {
+ CapturingDeviceIndex* capturing_device_index =
+ absl::get_if<CapturingDeviceIndex>(&source);
+ if (capturing_device_index != nullptr) {
+ std::unique_ptr<test::TestVideoCapturer> capturer =
+ test::CreateVideoCapturer(video_config.width, video_config.height,
+ video_config.fps,
+ static_cast<size_t>(*capturing_device_index));
+ RTC_CHECK(capturer)
+ << "Failed to obtain input stream from capturing device #"
+ << *capturing_device_index;
+ capturer->SetFramePreprocessor(std::move(frame_preprocessor));
+ return capturer;
+ }
+
+ auto capturer = std::make_unique<test::FrameGeneratorCapturer>(
+ clock_,
+ absl::get<std::unique_ptr<test::FrameGeneratorInterface>>(
+ std::move(source)),
+ video_config.fps, *task_queue_factory_);
+ capturer->SetFramePreprocessor(std::move(frame_preprocessor));
+ capturer->Init();
+ return capturer;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/media/media_helper.h b/third_party/libwebrtc/test/pc/e2e/media/media_helper.h
new file mode 100644
index 0000000000..2d163d009e
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/media/media_helper.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_
+#define TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/test/frame_generator_interface.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
+#include "test/pc/e2e/media/test_video_capturer_video_track_source.h"
+#include "test/pc/e2e/test_peer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+class MediaHelper {
+ public:
+ MediaHelper(VideoQualityAnalyzerInjectionHelper*
+ video_quality_analyzer_injection_helper,
+ TaskQueueFactory* task_queue_factory,
+ Clock* clock)
+ : clock_(clock),
+ task_queue_factory_(task_queue_factory),
+ video_quality_analyzer_injection_helper_(
+ video_quality_analyzer_injection_helper) {}
+
+ void MaybeAddAudio(TestPeer* peer);
+
+ std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>
+ MaybeAddVideo(TestPeer* peer);
+
+ private:
+ std::unique_ptr<test::TestVideoCapturer> CreateVideoCapturer(
+ const VideoConfig& video_config,
+ PeerConfigurer::VideoSource source,
+ std::unique_ptr<test::TestVideoCapturer::FramePreprocessor>
+ frame_preprocessor);
+
+ Clock* const clock_;
+ TaskQueueFactory* const task_queue_factory_;
+ VideoQualityAnalyzerInjectionHelper* video_quality_analyzer_injection_helper_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_MEDIA_MEDIA_HELPER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h b/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h
new file mode 100644
index 0000000000..c883a2e8e9
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_
+#define TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_source_interface.h"
+#include "pc/video_track_source.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// A VideoTrackSource backed by a test::TestVideoCapturer: the wrapped
+// capturer is exposed as the underlying frame source. Created as a local
+// (non-remote) source.
+class TestVideoCapturerVideoTrackSource : public VideoTrackSource {
+ public:
+  // Takes ownership of `video_capturer`. `is_screencast` is reported
+  // unchanged through is_screencast().
+  TestVideoCapturerVideoTrackSource(
+      std::unique_ptr<test::TestVideoCapturer> video_capturer,
+      bool is_screencast)
+      : VideoTrackSource(/*remote=*/false),
+        video_capturer_(std::move(video_capturer)),
+        is_screencast_(is_screencast) {}
+
+  ~TestVideoCapturerVideoTrackSource() = default;
+
+  // Marks the source as live; observable through the VideoTrackSource state.
+  void Start() { SetState(kLive); }
+
+  // Marks the source as muted.
+  void Stop() { SetState(kMuted); }
+
+  bool is_screencast() const override { return is_screencast_; }
+
+ protected:
+  // The wrapped capturer is the actual provider of frames.
+  rtc::VideoSourceInterface<VideoFrame>* source() override {
+    return video_capturer_.get();
+  }
+
+ private:
+  std::unique_ptr<test::TestVideoCapturer> video_capturer_;
+  const bool is_screencast_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h b/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h
new file mode 100644
index 0000000000..fbcd3b90fe
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/metric_metadata_keys.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PC_E2E_METRIC_METADATA_KEYS_H_
+#define TEST_PC_E2E_METRIC_METADATA_KEYS_H_
+
+#include <string>
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Keys for metadata attached to whole metrics. All metadata fields are
+// present only if applicable for the particular metric.
+class MetricMetadataKey {
+ public:
+  // Represents one peer with whom the metric is associated.
+  static constexpr char kPeerMetadataKey[] = "peer";
+  // Represents sender of the media stream.
+  static constexpr char kSenderMetadataKey[] = "sender";
+  // Represents receiver of the media stream.
+  static constexpr char kReceiverMetadataKey[] = "receiver";
+  // Represents name of the audio stream.
+  static constexpr char kAudioStreamMetadataKey[] = "audio_stream";
+  // Represents name of the video stream.
+  static constexpr char kVideoStreamMetadataKey[] = "video_stream";
+  // Represents name of the sync group to which the stream belongs.
+  static constexpr char kPeerSyncGroupMetadataKey[] = "peer_sync_group";
+  // Represents the test name (without any peer and stream data appended to it
+  // as it currently happens with the webrtc.test_metrics.Metric.test_case
+  // field). This metadata is temporary and it will be removed once this
+  // information is moved to webrtc.test_metrics.Metric.test_case.
+  // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+  static constexpr char kExperimentalTestNameMetadataKey[] =
+      "experimental_test_name";
+  // Represents index of a video spatial layer to which the metric belongs.
+  static constexpr char kSpatialLayerMetadataKey[] = "spatial_layer";
+
+ private:
+  // Not instantiable: the class only exposes constants.
+  MetricMetadataKey() = default;
+};
+
+// Keys for metadata attached to individual data points (samples). All
+// metadata fields are present only if applicable for the particular metric.
+class SampleMetadataKey {
+ public:
+  // Represents a frame ID with which the data point is associated.
+  static constexpr char kFrameIdMetadataKey[] = "frame_id";
+
+ private:
+  // Not instantiable: the class only exposes constants.
+  SampleMetadataKey() = default;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_METRIC_METADATA_KEYS_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc
new file mode 100644
index 0000000000..0bb28f0847
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.cc
@@ -0,0 +1,183 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/network_quality_metrics_reporter.h"
+
+#include <utility>
+
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/metric.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1);
+
+// Field trial which controls whether to report standard-compliant bytes
+// sent/received per stream. If enabled, padding and headers are not included
+// in bytes sent or received.
+constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats";
+
+} // namespace
+
+// Stores the two emulated networks to observe and the metrics sink. None of
+// the pointers are owned; they must outlive the reporter.
+NetworkQualityMetricsReporter::NetworkQualityMetricsReporter(
+    EmulatedNetworkManagerInterface* alice_network,
+    EmulatedNetworkManagerInterface* bob_network,
+    test::MetricsLogger* metrics_logger)
+    : alice_network_(alice_network),
+      bob_network_(bob_network),
+      metrics_logger_(metrics_logger) {
+  // The logger is mandatory; fail fast rather than crash on the first report.
+  RTC_CHECK(metrics_logger_);
+}
+
+// Records the test case name and asserts the precondition that neither
+// emulated network has sent or received any packets before the test starts.
+void NetworkQualityMetricsReporter::Start(
+    absl::string_view test_case_name,
+    const TrackIdStreamInfoMap* /*reporter_helper*/) {
+  test_case_name_ = std::string(test_case_name);
+  // Check that network stats are clean before test execution.
+  EmulatedNetworkStats alice_stats = PopulateStats(alice_network_);
+  RTC_CHECK_EQ(alice_stats.overall_outgoing_stats.packets_sent, 0);
+  RTC_CHECK_EQ(alice_stats.overall_incoming_stats.packets_received, 0);
+  EmulatedNetworkStats bob_stats = PopulateStats(bob_network_);
+  RTC_CHECK_EQ(bob_stats.overall_outgoing_stats.packets_sent, 0);
+  RTC_CHECK_EQ(bob_stats.overall_incoming_stats.packets_received, 0);
+}
+
+// Snapshots, for the peer connection `pc_label`, total "payload" bytes in
+// each direction: the sum of `bytes_*` plus `header_bytes_*` over all
+// inbound/outbound RTP stream stats in `report` (absent counters count as 0).
+// Overwrites (does not accumulate across calls) the entry in pc_stats_.
+void NetworkQualityMetricsReporter::OnStatsReports(
+    absl::string_view pc_label,
+    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+  DataSize payload_received = DataSize::Zero();
+  DataSize payload_sent = DataSize::Zero();
+
+  auto inbound_stats = report->GetStatsOfType<RTCInboundRTPStreamStats>();
+  for (const auto& stat : inbound_stats) {
+    payload_received +=
+        DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) +
+                        stat->header_bytes_received.ValueOrDefault(0ul));
+  }
+
+  auto outbound_stats = report->GetStatsOfType<RTCOutboundRTPStreamStats>();
+  for (const auto& stat : outbound_stats) {
+    payload_sent +=
+        DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) +
+                        stat->header_bytes_sent.ValueOrDefault(0ul));
+  }
+
+  // pc_stats_ is also read by StopAndReportResults(); it is guarded by lock_
+  // (see RTC_GUARDED_BY in the header).
+  MutexLock lock(&lock_);
+  PCStats& stats = pc_stats_[std::string(pc_label)];
+  stats.payload_received = payload_received;
+  stats.payload_sent = payload_sent;
+}
+
+// Computes directional packet loss (packets sent by one side minus packets
+// received by the other), then reports per-network stats followed by the
+// per-peer-connection payload counters collected in OnStatsReports().
+void NetworkQualityMetricsReporter::StopAndReportResults() {
+  EmulatedNetworkStats alice_stats = PopulateStats(alice_network_);
+  EmulatedNetworkStats bob_stats = PopulateStats(bob_network_);
+  int64_t alice_packets_loss =
+      alice_stats.overall_outgoing_stats.packets_sent -
+      bob_stats.overall_incoming_stats.packets_received;
+  int64_t bob_packets_loss =
+      bob_stats.overall_outgoing_stats.packets_sent -
+      alice_stats.overall_incoming_stats.packets_received;
+  ReportStats("alice", alice_stats, alice_packets_loss);
+  ReportStats("bob", bob_stats, bob_packets_loss);
+
+  // Without this field trial the GetStats "bytes" counters include RTP
+  // headers, so the reported "payload" values are not standard-compliant.
+  if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) {
+    RTC_LOG(LS_ERROR)
+        << "Non-standard GetStats; \"payload\" counts include RTP headers";
+  }
+
+  MutexLock lock(&lock_);
+  for (const auto& pair : pc_stats_) {
+    ReportPCStats(pair.first, pair.second);
+  }
+}
+
+// Synchronously fetches stats from `network`. GetStats() delivers its result
+// through an asynchronous callback, so block on an event (bounded by
+// kStatsWaitTimeout) and crash if the stats never arrive.
+EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats(
+    EmulatedNetworkManagerInterface* network) {
+  rtc::Event wait;
+  EmulatedNetworkStats stats;
+  network->GetStats([&](EmulatedNetworkStats s) {
+    stats = std::move(s);
+    wait.Set();
+  });
+  bool stats_received = wait.Wait(kStatsWaitTimeout);
+  RTC_CHECK(stats_received);
+  return stats;
+}
+
+// Logs each network-level counter of `stats` (plus the externally computed
+// `packet_loss`) as a single-value metric under
+// "<test case>/<network_label>". Average send/receive rates are reported as 0
+// when fewer than 2 packets were transferred, since a rate needs at least two
+// packet timestamps to be meaningful.
+void NetworkQualityMetricsReporter::ReportStats(
+    const std::string& network_label,
+    const EmulatedNetworkStats& stats,
+    int64_t packet_loss) {
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_sent", GetTestCaseName(network_label),
+      stats.overall_outgoing_stats.bytes_sent.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_sent", GetTestCaseName(network_label),
+      stats.overall_outgoing_stats.packets_sent, Unit::kUnitless,
+      ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "average_send_rate", GetTestCaseName(network_label),
+      stats.overall_outgoing_stats.packets_sent >= 2
+          ? stats.overall_outgoing_stats.AverageSendRate().kbps<double>()
+          : 0,
+      Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_discarded_no_receiver", GetTestCaseName(network_label),
+      stats.overall_incoming_stats.bytes_discarded_no_receiver.bytes(),
+      Unit::kBytes, ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_discarded_no_receiver", GetTestCaseName(network_label),
+      stats.overall_incoming_stats.packets_discarded_no_receiver,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_received", GetTestCaseName(network_label),
+      stats.overall_incoming_stats.bytes_received.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_received", GetTestCaseName(network_label),
+      stats.overall_incoming_stats.packets_received, Unit::kUnitless,
+      ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "average_receive_rate", GetTestCaseName(network_label),
+      stats.overall_incoming_stats.packets_received >= 2
+          ? stats.overall_incoming_stats.AverageReceiveRate().kbps<double>()
+          : 0,
+      Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "sent_packets_loss", GetTestCaseName(network_label), packet_loss,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter);
+}
+
+// Logs the per-peer-connection payload byte counters (collected in
+// OnStatsReports()) under the peer connection's own label.
+void NetworkQualityMetricsReporter::ReportPCStats(const std::string& pc_label,
+                                                  const PCStats& stats) {
+  metrics_logger_->LogSingleValueMetric(
+      "payload_bytes_received", pc_label, stats.payload_received.bytes(),
+      Unit::kBytes, ImprovementDirection::kNeitherIsBetter);
+  metrics_logger_->LogSingleValueMetric(
+      "payload_bytes_sent", pc_label, stats.payload_sent.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter);
+}
+
+// Builds the metric grouping key: "<test case name>/<network label>".
+std::string NetworkQualityMetricsReporter::GetTestCaseName(
+    const std::string& network_label) const {
+  return test_case_name_ + "/" + network_label;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.h b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.h
new file mode 100644
index 0000000000..ed894bcf54
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/network_quality_metrics_reporter.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_NETWORK_QUALITY_METRICS_REPORTER_H_
+#define TEST_PC_E2E_NETWORK_QUALITY_METRICS_REPORTER_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/track_id_stream_info_map.h"
+#include "api/units/data_size.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Quality metrics reporter that logs emulated-network level counters (bytes,
+// packets, rates, losses) for a two-peer (alice/bob) call, plus per-peer-
+// connection payload byte counters gathered from GetStats reports.
+class NetworkQualityMetricsReporter
+    : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
+ public:
+  // Does not take ownership of any argument; all must outlive the reporter.
+  NetworkQualityMetricsReporter(EmulatedNetworkManagerInterface* alice_network,
+                                EmulatedNetworkManagerInterface* bob_network,
+                                test::MetricsLogger* metrics_logger);
+  ~NetworkQualityMetricsReporter() override = default;
+
+  // Network stats must be empty when this method is invoked (enforced with
+  // RTC_CHECKs in the implementation).
+  void Start(absl::string_view test_case_name,
+             const TrackIdStreamInfoMap* reporter_helper) override;
+  // Records payload byte counters for the peer connection `pc_label`.
+  void OnStatsReports(
+      absl::string_view pc_label,
+      const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+  // Logs all collected metrics via `metrics_logger`.
+  void StopAndReportResults() override;
+
+ private:
+  struct PCStats {
+    // TODO(nisse): Separate audio and video counters. Depends on standard stat
+    // counters, enabled by field trial "WebRTC-UseStandardBytesStats".
+    DataSize payload_received = DataSize::Zero();
+    DataSize payload_sent = DataSize::Zero();
+  };
+
+  // Blocking fetch of `network`'s stats (async callback + event wait).
+  static EmulatedNetworkStats PopulateStats(
+      EmulatedNetworkManagerInterface* network);
+  void ReportStats(const std::string& network_label,
+                   const EmulatedNetworkStats& stats,
+                   int64_t packet_loss);
+  void ReportPCStats(const std::string& pc_label, const PCStats& stats);
+  // Returns "<test case name>/<network label>" used to group metrics.
+  std::string GetTestCaseName(const std::string& network_label) const;
+
+  std::string test_case_name_;
+
+  EmulatedNetworkManagerInterface* const alice_network_;
+  EmulatedNetworkManagerInterface* const bob_network_;
+  test::MetricsLogger* const metrics_logger_;
+  // Guards pc_stats_: OnStatsReports() writes and StopAndReportResults()
+  // reads the map.
+  Mutex lock_;
+  std::map<std::string, PCStats> pc_stats_ RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_NETWORK_QUALITY_METRICS_REPORTER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc
new file mode 100644
index 0000000000..0e7993e5be
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_e2e_smoke_test.cc
@@ -0,0 +1,536 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdint>
+#include <memory>
+#include <string>
+
+#include "api/media_stream_interface.h"
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+#include "api/test/create_peerconnection_quality_test_fixture.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "call/simulated_network.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
+#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h"
+#include "test/testsupport/file_utils.h"
+
+#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
+#include "modules/video_coding/codecs/test/objc_codec_factory_helper.h" // nogncheck
+#endif
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// Fixture for the PC E2E smoke tests: owns a network emulation manager, the
+// quality-test fixture, and the default video quality analyzer whose frame
+// counters the tests assert on.
+class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    network_emulation_ = CreateNetworkEmulationManager();
+    auto video_quality_analyzer = std::make_unique<DefaultVideoQualityAnalyzer>(
+        network_emulation_->time_controller()->GetClock(),
+        test::GetGlobalMetricsLogger());
+    // Keep a raw observer pointer before ownership moves into the fixture.
+    video_quality_analyzer_ = video_quality_analyzer.get();
+    fixture_ = CreatePeerConnectionE2EQualityTestFixture(
+        testing::UnitTest::GetInstance()->current_test_info()->name(),
+        *network_emulation_->time_controller(),
+        /*audio_quality_analyzer=*/nullptr, std::move(video_quality_analyzer));
+  }
+
+  // Builds two endpoints connected through one simulated network node in each
+  // direction and returns the (alice, bob) network manager interfaces.
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+  CreateNetwork() {
+    EmulatedNetworkNode* alice_node = network_emulation_->CreateEmulatedNode(
+        std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+    EmulatedNetworkNode* bob_node = network_emulation_->CreateEmulatedNode(
+        std::make_unique<SimulatedNetwork>(BuiltInNetworkBehaviorConfig()));
+
+    EmulatedEndpoint* alice_endpoint =
+        network_emulation_->CreateEndpoint(EmulatedEndpointConfig());
+    EmulatedEndpoint* bob_endpoint =
+        network_emulation_->CreateEndpoint(EmulatedEndpointConfig());
+
+    network_emulation_->CreateRoute(alice_endpoint, {alice_node}, bob_endpoint);
+    network_emulation_->CreateRoute(bob_endpoint, {bob_node}, alice_endpoint);
+
+    EmulatedNetworkManagerInterface* alice_network =
+        network_emulation_->CreateEmulatedNetworkManagerInterface(
+            {alice_endpoint});
+    EmulatedNetworkManagerInterface* bob_network =
+        network_emulation_->CreateEmulatedNetworkManagerInterface(
+            {bob_endpoint});
+
+    return std::make_pair(alice_network, bob_network);
+  }
+
+  // Adds one peer attached to `network`, letting the caller customize the
+  // configurer before it is handed to the fixture.
+  void AddPeer(EmulatedNetworkManagerInterface* network,
+               rtc::FunctionView<void(PeerConfigurer*)> update_configurer) {
+    auto configurer =
+        std::make_unique<PeerConfigurer>(network->network_dependencies());
+    update_configurer(configurer.get());
+    fixture_->AddPeer(std::move(configurer));
+  }
+
+  // Runs the fixture and asserts that every known video stream captured at
+  // least `duration * 15` frames and that at least one frame made it through
+  // each stage of the pipeline (encode, receive, decode, render).
+  // TODO(review): `stream_conters` is misspelled (should be stream_counters),
+  // and `expected_min_fps` is really a minimum frame *count*, not an fps.
+  void RunAndCheckEachVideoStreamReceivedFrames(const RunParams& run_params) {
+    fixture_->Run(run_params);
+
+    EXPECT_GE(fixture_->GetRealTestDuration(), run_params.run_duration);
+    VideoStreamsInfo known_streams = video_quality_analyzer_->GetKnownStreams();
+    for (const StatsKey& stream_key : known_streams.GetStatsKeys()) {
+      FrameCounters stream_conters =
+          video_quality_analyzer_->GetPerStreamCounters().at(stream_key);
+      // On some devices the pipeline can be too slow, so we actually can't
+      // force real constraints here. Lets just check, that at least 1
+      // frame passed whole pipeline.
+      int64_t expected_min_fps = run_params.run_duration.seconds() * 15;
+      EXPECT_GE(stream_conters.captured, expected_min_fps)
+          << stream_key.ToString();
+      EXPECT_GE(stream_conters.pre_encoded, 1) << stream_key.ToString();
+      EXPECT_GE(stream_conters.encoded, 1) << stream_key.ToString();
+      EXPECT_GE(stream_conters.received, 1) << stream_key.ToString();
+      EXPECT_GE(stream_conters.decoded, 1) << stream_key.ToString();
+      EXPECT_GE(stream_conters.rendered, 1) << stream_key.ToString();
+    }
+  }
+
+  NetworkEmulationManager* network_emulation() {
+    return network_emulation_.get();
+  }
+
+  PeerConnectionE2EQualityTestFixture* fixture() { return fixture_.get(); }
+
+ private:
+  std::unique_ptr<NetworkEmulationManager> network_emulation_;
+  // Non-owning; points into the analyzer owned by fixture_.
+  DefaultVideoQualityAnalyzer* video_quality_analyzer_;
+  std::unique_ptr<PeerConnectionE2EQualityTestFixture> fixture_;
+};
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Smoke DISABLED_Smoke
+#else
+#define MAYBE_Smoke Smoke
+#endif
+// Basic two-peer audio+video smoke test: VP9, FlexFEC/ULPFEC enabled, both
+// peers sharing a sync group for their media, verified through the frame
+// counters of the video quality analyzer.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "alice-video";
+    video.sync_group = "alice-media";
+    alice->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    audio.sampling_frequency_in_hz = 48000;
+    audio.sync_group = "alice-media";
+    alice->SetAudioConfig(std::move(audio));
+    alice->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+
+    alice->SetUseFlexFEC(true);
+    alice->SetUseUlpFEC(true);
+    alice->SetVideoEncoderBitrateMultiplier(1.1);
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* charlie) {
+    charlie->SetName("charlie");
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "charlie-video";
+    video.temporal_layers_count = 2;
+    charlie->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "charlie-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
+    charlie->SetAudioConfig(std::move(audio));
+    charlie->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+
+    charlie->SetUseFlexFEC(true);
+    charlie->SetUseUlpFEC(true);
+    charlie->SetVideoEncoderBitrateMultiplier(1.1);
+  });
+  // Also report network-level quality metrics for both peers.
+  fixture()->AddQualityMetricsReporter(
+      std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+          std::map<std::string, std::vector<EmulatedEndpoint*>>(
+              {{"alice", network_links.first->endpoints()},
+               {"charlie", network_links.second->endpoints()}}),
+          network_emulation(), test::GetGlobalMetricsLogger()));
+  RunParams run_params(TimeDelta::Seconds(2));
+  run_params.enable_flex_fec_support = true;
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+// Bug fix: this branch previously (re)defined MAYBE_Smoke, leaving
+// MAYBE_SendAndReceivePacketsOnOneThread undefined, so the test was never
+// actually disabled on iOS arm64 debug builds (the MAYBE_ token is used
+// verbatim as the test name when no macro matches).
+#define MAYBE_SendAndReceivePacketsOnOneThread \
+  DISABLED_SmokeSendAndReceivePacketsOnOneThread
+#else
+#define MAYBE_SendAndReceivePacketsOnOneThread \
+  SmokeSendAndReceivePacketsOnOneThread
+#endif
+// Only use the network thread for sending and receiving packets.
+// The one and only network thread is used as a worker thread in all
+// PeerConnections. Pacing when sending packets is done on the worker thread.
+// See bugs.webrtc.org/14502.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest,
+       MAYBE_SendAndReceivePacketsOnOneThread) {
+  // Append to (rather than replace) any already-active field trials.
+  test::ScopedFieldTrials trials(
+      std::string(field_trial::GetFieldTrialString()) +
+      "WebRTC-SendPacketsOnWorkerThread/Enabled/");
+
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    // Peerconnection use the network thread as the worker thread.
+    alice->SetUseNetworkThreadAsWorkerThread();
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "alice-video";
+    video.sync_group = "alice-media";
+    alice->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    audio.sampling_frequency_in_hz = 48000;
+    audio.sync_group = "alice-media";
+    alice->SetAudioConfig(std::move(audio));
+    alice->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* charlie) {
+    // Peerconnection use the network thread as the worker thread.
+    charlie->SetUseNetworkThreadAsWorkerThread();
+    charlie->SetName("charlie");
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "charlie-video";
+    video.temporal_layers_count = 2;
+    charlie->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "charlie-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
+    charlie->SetAudioConfig(std::move(audio));
+    charlie->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+    charlie->SetVideoEncoderBitrateMultiplier(1.1);
+  });
+  fixture()->AddQualityMetricsReporter(
+      std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+          std::map<std::string, std::vector<EmulatedEndpoint*>>(
+              {{"alice", network_links.first->endpoints()},
+               {"charlie", network_links.second->endpoints()}}),
+          network_emulation(), test::GetGlobalMetricsLogger()));
+  RunParams run_params(TimeDelta::Seconds(2));
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// H264 variant of the smoke test. Only built on Apple platforms, where the
+// ObjC (platform) encoder/decoder factories provide the H264 codec.
+#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, SmokeH264) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "alice-video";
+    video.sync_group = "alice-media";
+    alice->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    audio.sampling_frequency_in_hz = 48000;
+    audio.sync_group = "alice-media";
+    alice->SetAudioConfig(std::move(audio));
+    alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)});
+    alice->SetVideoEncoderFactory(webrtc::test::CreateObjCEncoderFactory());
+    alice->SetVideoDecoderFactory(webrtc::test::CreateObjCDecoderFactory());
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* charlie) {
+    charlie->SetName("charlie");
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "charlie-video";
+    video.temporal_layers_count = 2;
+    charlie->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "charlie-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
+    charlie->SetAudioConfig(std::move(audio));
+    charlie->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)});
+    charlie->SetVideoEncoderFactory(webrtc::test::CreateObjCEncoderFactory());
+    charlie->SetVideoDecoderFactory(webrtc::test::CreateObjCDecoderFactory());
+  });
+
+  fixture()->AddQualityMetricsReporter(
+      std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+          std::map<std::string, std::vector<EmulatedEndpoint*>>(
+              {{"alice", network_links.first->endpoints()},
+               {"charlie", network_links.second->endpoints()}}),
+          network_emulation(), test::GetGlobalMetricsLogger()));
+  RunParams run_params(TimeDelta::Seconds(2));
+  run_params.enable_flex_fec_support = true;
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+#endif
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_ChangeNetworkConditions DISABLED_ChangeNetworkConditions
+#else
+#define MAYBE_ChangeNetworkConditions ChangeNetworkConditions
+#endif
+// Verifies the call survives network conditions changing mid-test: after 1
+// second alice's network node is reconfigured to 5% packet loss while frames
+// keep being verified for the remaining run time.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_ChangeNetworkConditions) {
+  // Build the network manually (rather than via CreateNetwork()) so we keep
+  // the SimulatedNetworkNode handles needed to change the config later.
+  NetworkEmulationManager::SimulatedNetworkNode alice_node =
+      network_emulation()
+          ->NodeBuilder()
+          .config(BuiltInNetworkBehaviorConfig())
+          .Build();
+  NetworkEmulationManager::SimulatedNetworkNode bob_node =
+      network_emulation()
+          ->NodeBuilder()
+          .config(BuiltInNetworkBehaviorConfig())
+          .Build();
+
+  EmulatedEndpoint* alice_endpoint =
+      network_emulation()->CreateEndpoint(EmulatedEndpointConfig());
+  EmulatedEndpoint* bob_endpoint =
+      network_emulation()->CreateEndpoint(EmulatedEndpointConfig());
+
+  network_emulation()->CreateRoute(alice_endpoint, {alice_node.node},
+                                   bob_endpoint);
+  network_emulation()->CreateRoute(bob_endpoint, {bob_node.node},
+                                   alice_endpoint);
+
+  EmulatedNetworkManagerInterface* alice_network =
+      network_emulation()->CreateEmulatedNetworkManagerInterface(
+          {alice_endpoint});
+  EmulatedNetworkManagerInterface* bob_network =
+      network_emulation()->CreateEmulatedNetworkManagerInterface(
+          {bob_endpoint});
+
+  AddPeer(alice_network, [](PeerConfigurer* alice) {
+    VideoConfig video(160, 120, 15);
+    video.stream_label = "alice-video";
+    video.sync_group = "alice-media";
+    alice->AddVideoConfig(std::move(video));
+    alice->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+
+    alice->SetUseFlexFEC(true);
+    alice->SetUseUlpFEC(true);
+    alice->SetVideoEncoderBitrateMultiplier(1.1);
+  });
+  AddPeer(bob_network, [](PeerConfigurer* bob) {
+    bob->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+
+    bob->SetUseFlexFEC(true);
+    bob->SetUseUlpFEC(true);
+    bob->SetVideoEncoderBitrateMultiplier(1.1);
+  });
+  fixture()->AddQualityMetricsReporter(
+      std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+          std::map<std::string, std::vector<EmulatedEndpoint*>>(
+              {{"alice", alice_network->endpoints()},
+               {"bob", bob_network->endpoints()}}),
+          network_emulation(), test::GetGlobalMetricsLogger()));
+
+  // Degrade alice's uplink to 5% loss one second into the call.
+  fixture()->ExecuteAt(TimeDelta::Seconds(1), [alice_node](TimeDelta) {
+    BuiltInNetworkBehaviorConfig config;
+    config.loss_percent = 5;
+    alice_node.simulation->SetConfig(config);
+  });
+
+  RunParams run_params(TimeDelta::Seconds(2));
+  run_params.enable_flex_fec_support = true;
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Screenshare DISABLED_Screenshare
+#else
+#define MAYBE_Screenshare Screenshare
+#endif
+// One-way screenshare test: alice sends a scrolling screen-share stream
+// (content hint kText); bob only receives.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Screenshare) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    VideoConfig screenshare(320, 180, 30);
+    screenshare.stream_label = "alice-screenshare";
+    screenshare.content_hint = VideoTrackInterface::ContentHint::kText;
+    ScreenShareConfig screen_share_config =
+        ScreenShareConfig(TimeDelta::Seconds(2));
+    screen_share_config.scrolling_params =
+        ScrollingParams{.duration = TimeDelta::Millis(1800)};
+    auto screen_share_frame_generator =
+        CreateScreenShareFrameGenerator(screenshare, screen_share_config);
+    alice->AddVideoConfig(std::move(screenshare),
+                          std::move(screen_share_frame_generator));
+  });
+  // Bob is a pure receiver: no media configured.
+  AddPeer(network_links.second, [](PeerConfigurer* bob) {});
+  RunAndCheckEachVideoStreamReceivedFrames(RunParams(TimeDelta::Seconds(2)));
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Echo DISABLED_Echo
+#else
+#define MAYBE_Echo Echo
+#endif
+// Audio-only test with echo emulation enabled; no video streams are
+// configured, so the video-frame checks iterate over an empty stream set.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Echo) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    audio.sampling_frequency_in_hz = 48000;
+    alice->SetAudioConfig(std::move(audio));
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* bob) {
+    AudioConfig audio;
+    audio.stream_label = "bob-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_bob_source", "wav");
+    bob->SetAudioConfig(std::move(audio));
+  });
+  RunParams run_params(TimeDelta::Seconds(2));
+  // Turn on echo emulation with its default configuration.
+  run_params.echo_emulation_config = EchoEmulationConfig();
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Simulcast DISABLED_Simulcast
+#else
+#define MAYBE_Simulcast Simulcast
+#endif
+// Smoke test: Alice sends a 2-layer simulcast stream plus audio; the emulated
+// SFU forwards only spatial layer 0 (EmulatedSFUConfig(0)) to Bob.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Simulcast) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    VideoConfig simulcast(1280, 720, 15);
+    simulcast.stream_label = "alice-simulcast";
+    simulcast.simulcast_config = VideoSimulcastConfig(2);
+    simulcast.emulated_sfu_config = EmulatedSFUConfig(0);
+    alice->AddVideoConfig(std::move(simulcast));
+
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    alice->SetAudioConfig(std::move(audio));
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* bob) {});
+  RunParams run_params(TimeDelta::Seconds(2));
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_Svc DISABLED_Svc
+#else
+#define MAYBE_Svc Svc
+#endif
+// Smoke test: VP9 SVC call with 2 spatial layers; the emulated SFU forwards
+// layer 1 (the highest) to Bob. Both peers are restricted to VP9.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    VideoConfig simulcast("alice-svc", 1280, 720, 15);
+    // Because we have network with packets loss we can analyze only the
+    // highest spatial layer in SVC mode.
+    simulcast.simulcast_config = VideoSimulcastConfig(2);
+    simulcast.emulated_sfu_config = EmulatedSFUConfig(1);
+    alice->AddVideoConfig(std::move(simulcast));
+
+    AudioConfig audio("alice-audio");
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    alice->SetAudioConfig(std::move(audio));
+    alice->SetVideoCodecs({VideoCodecConfig(cricket::kVp9CodecName)});
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* bob) {
+    bob->SetVideoCodecs({VideoCodecConfig(cricket::kVp9CodecName)});
+  });
+  RunParams run_params(TimeDelta::Seconds(2));
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+// IOS debug builds can be quite slow, disabling to avoid issues with timeouts.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_HighBitrate DISABLED_HighBitrate
+#else
+#define MAYBE_HighBitrate HighBitrate
+#endif
+// Smoke test: single VP9 profile-0 video stream plus audio with call-level
+// start/max bitrate pinned at 3 Mbps and per-encoding min/max bitrates set.
+TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) {
+  std::pair<EmulatedNetworkManagerInterface*, EmulatedNetworkManagerInterface*>
+      network_links = CreateNetwork();
+  AddPeer(network_links.first, [](PeerConfigurer* alice) {
+    BitrateSettings bitrate_settings;
+    bitrate_settings.start_bitrate_bps = 3'000'000;
+    bitrate_settings.max_bitrate_bps = 3'000'000;
+    alice->SetBitrateSettings(bitrate_settings);
+    VideoConfig video(800, 600, 15);
+    video.stream_label = "alice-video";
+    RtpEncodingParameters encoding_parameters;
+    encoding_parameters.min_bitrate_bps = 500'000;
+    encoding_parameters.max_bitrate_bps = 3'000'000;
+    video.encoding_params.push_back(std::move(encoding_parameters));
+    alice->AddVideoConfig(std::move(video));
+
+    AudioConfig audio;
+    audio.stream_label = "alice-audio";
+    audio.mode = AudioConfig::Mode::kFile;
+    audio.input_file_name =
+        test::ResourcePath("pc_quality_smoke_test_alice_source", "wav");
+    audio.sampling_frequency_in_hz = 48000;
+    alice->SetAudioConfig(std::move(audio));
+    alice->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+  });
+  AddPeer(network_links.second, [](PeerConfigurer* bob) {
+    bob->SetVideoCodecs(
+        {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})});
+  });
+  RunParams run_params(TimeDelta::Seconds(2));
+  RunAndCheckEachVideoStreamReceivedFrames(run_params);
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc
new file mode 100644
index 0000000000..83613118f9
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.cc
@@ -0,0 +1,763 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/peer_connection_quality_test.h"
+
+#include <algorithm>
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "absl/strings/string_view.h"
+#include "api/jsep.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log_output_file.h"
+#include "api/scoped_refptr.h"
+#include "api/test/metrics/metric.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/time_controller.h"
+#include "api/test/video_quality_analyzer_interface.h"
+#include "pc/sdp_utils.h"
+#include "pc/test/mock_peer_connection_observers.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/cpu_info.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
+#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
+#include "test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h"
+#include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h"
+#include "test/pc/e2e/cross_media_metrics_reporter.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+#include "test/pc/e2e/peer_params_preprocessor.h"
+#include "test/pc/e2e/stats_poller.h"
+#include "test/pc/e2e/test_peer_factory.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+// Timeout for per-step waits (ICE gathering uses 2x this value).
+constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(10);
+constexpr char kSignalThreadName[] = "signaling_thread";
+// 1 signaling, 2 network, 2 worker and 2 extra for codecs etc.
+constexpr int kPeerConnectionUsedThreads = 7;
+// Framework has extra thread for network layer and extra thread for peer
+// connection stats polling.
+constexpr int kFrameworkUsedThreads = 2;
+// Upper bound on threads given to the video quality analyzer regardless of
+// how many cores are detected.
+constexpr int kMaxVideoAnalyzerThreads = 8;
+
+// Interval between getStats() polls during the call.
+constexpr TimeDelta kStatsUpdateInterval = TimeDelta::Seconds(1);
+
+// Period of the "Test is still running..." keep-alive log line.
+constexpr TimeDelta kAliveMessageLogInterval = TimeDelta::Seconds(30);
+
+// Run duration used when the WebRTC-QuickPerfTest field trial is enabled.
+constexpr TimeDelta kQuickTestModeRunDuration = TimeDelta::Millis(100);
+
+// Field trials to enable Flex FEC advertising and receiving.
+constexpr char kFlexFecEnabledFieldTrials[] =
+    "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
+constexpr char kUseStandardsBytesStats[] =
+    "WebRTC-UseStandardBytesStats/Enabled/";
+
+// PeerConnection observer used by the fixture: forwards OnTrack and
+// "connected" events to the callbacks supplied by PeerConnectionE2EQualityTest
+// while keeping the base MockPeerConnectionObserver bookkeeping intact.
+class FixturePeerConnectionObserver : public MockPeerConnectionObserver {
+ public:
+  // `on_track_callback` will be called when any new track will be added to peer
+  // connection.
+  // `on_connected_callback` will be called when peer connection will come to
+  // either connected or completed state. Client should notice that in the case
+  // of reconnect this callback can be called again, so it should be tolerant
+  // to such behavior.
+  FixturePeerConnectionObserver(
+      std::function<void(rtc::scoped_refptr<RtpTransceiverInterface>)>
+          on_track_callback,
+      std::function<void()> on_connected_callback)
+      : on_track_callback_(std::move(on_track_callback)),
+        on_connected_callback_(std::move(on_connected_callback)) {}
+
+  void OnTrack(
+      rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override {
+    MockPeerConnectionObserver::OnTrack(transceiver);
+    on_track_callback_(transceiver);
+  }
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override {
+    MockPeerConnectionObserver::OnIceConnectionChange(new_state);
+    // `ice_connected_` is updated by the base class call above.
+    if (ice_connected_) {
+      on_connected_callback_();
+    }
+  }
+
+ private:
+  std::function<void(rtc::scoped_refptr<RtpTransceiverInterface>)>
+      on_track_callback_;
+  std::function<void()> on_connected_callback_;
+};
+
+// Checks the P2P-specific constraint that a peer using simulcast in any of
+// its video configs declares exactly one video codec; crashes via RTC_CHECK
+// otherwise.
+void ValidateP2PSimulcastParams(
+    const std::vector<std::unique_ptr<PeerConfigurer>>& peers) {
+  for (size_t i = 0; i < peers.size(); ++i) {
+    Params* params = peers[i]->params();
+    ConfigurableParams* configurable_params = peers[i]->configurable_params();
+    for (const VideoConfig& video_config : configurable_params->video_configs) {
+      if (video_config.simulcast_config) {
+        // When we simulate SFU we support only one video codec.
+        RTC_CHECK_EQ(params->video_codecs.size(), 1)
+            << "Only 1 video codec is supported when simulcast is enabled in "
+            << "at least 1 video config";
+      }
+    }
+  }
+}
+
+} // namespace
+
+// Convenience constructor: delegates to the full constructor with a null
+// metrics logger.
+PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest(
+    std::string test_case_name,
+    TimeController& time_controller,
+    std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
+    std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer)
+    : PeerConnectionE2EQualityTest(std::move(test_case_name),
+                                   time_controller,
+                                   std::move(audio_quality_analyzer),
+                                   std::move(video_quality_analyzer),
+                                   /*metrics_logger_=*/nullptr) {}
+
+// Full constructor: wires up time control, the activities executor and the
+// audio/video quality analyzers, substituting defaults where the caller
+// passed nullptr.
+PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest(
+    std::string test_case_name,
+    TimeController& time_controller,
+    std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
+    std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer,
+    test::MetricsLogger* metrics_logger)
+    : time_controller_(time_controller),
+      task_queue_factory_(time_controller_.CreateTaskQueueFactory()),
+      test_case_name_(std::move(test_case_name)),
+      executor_(std::make_unique<TestActivitiesExecutor>(
+          time_controller_.GetClock())),
+      metrics_logger_(metrics_logger) {
+  // Create default video quality analyzer. We will always create an analyzer,
+  // even if there are no video streams, because it will be installed into video
+  // encoder/decoder factories.
+  if (video_quality_analyzer == nullptr) {
+    video_quality_analyzer = std::make_unique<DefaultVideoQualityAnalyzer>(
+        time_controller_.GetClock(), metrics_logger_);
+  }
+  // Pick the encoded-image data propagation strategy based on the field trial:
+  // frame tracking id injection vs. single-process injection.
+  if (field_trial::IsEnabled("WebRTC-VideoFrameTrackingIdAdvertised")) {
+    encoded_image_data_propagator_ =
+        std::make_unique<VideoFrameTrackingIdInjector>();
+  } else {
+    encoded_image_data_propagator_ =
+        std::make_unique<SingleProcessEncodedImageDataInjector>();
+  }
+  video_quality_analyzer_injection_helper_ =
+      std::make_unique<VideoQualityAnalyzerInjectionHelper>(
+          time_controller_.GetClock(), std::move(video_quality_analyzer),
+          encoded_image_data_propagator_.get(),
+          encoded_image_data_propagator_.get());
+
+  if (audio_quality_analyzer == nullptr) {
+    audio_quality_analyzer =
+        std::make_unique<DefaultAudioQualityAnalyzer>(metrics_logger_);
+  }
+  audio_quality_analyzer_.swap(audio_quality_analyzer);
+}
+
+// Schedules `func` to run once at `target_time_since_start` into the call.
+void PeerConnectionE2EQualityTest::ExecuteAt(
+    TimeDelta target_time_since_start,
+    std::function<void(TimeDelta)> func) {
+  executor_->ScheduleActivity(target_time_since_start, absl::nullopt, func);
+}
+
+// Schedules `func` to run repeatedly every `interval`, starting at
+// `initial_delay_since_start` into the call.
+void PeerConnectionE2EQualityTest::ExecuteEvery(
+    TimeDelta initial_delay_since_start,
+    TimeDelta interval,
+    std::function<void(TimeDelta)> func) {
+  executor_->ScheduleActivity(initial_delay_since_start, interval, func);
+}
+
+// Registers an extra metrics reporter that will be started and stopped
+// together with the built-in ones in Run().
+void PeerConnectionE2EQualityTest::AddQualityMetricsReporter(
+    std::unique_ptr<QualityMetricsReporter> quality_metrics_reporter) {
+  quality_metrics_reporters_.push_back(std::move(quality_metrics_reporter));
+}
+
+// Stores a peer configuration; the returned handle stays valid because
+// handles are kept in `peer_handles_`.
+PeerConnectionE2EQualityTest::PeerHandle* PeerConnectionE2EQualityTest::AddPeer(
+    std::unique_ptr<PeerConfigurer> configurer) {
+  peer_configurations_.push_back(std::move(configurer));
+  peer_handles_.push_back(PeerHandleImpl());
+  return &peer_handles_.back();
+}
+
+// Executes the whole end-to-end call: validates the two peer configurations,
+// creates Alice and Bob, negotiates SDP/ICE, runs the call for the requested
+// duration under the simulated/real time controller, then tears everything
+// down and reports results.
+void PeerConnectionE2EQualityTest::Run(RunParams run_params) {
+  webrtc::webrtc_pc_e2e::PeerParamsPreprocessor params_preprocessor;
+  for (auto& peer_configuration : peer_configurations_) {
+    params_preprocessor.SetDefaultValuesForMissingParams(*peer_configuration);
+    params_preprocessor.ValidateParams(*peer_configuration);
+  }
+  ValidateP2PSimulcastParams(peer_configurations_);
+  RTC_CHECK_EQ(peer_configurations_.size(), 2)
+      << "Only peer to peer calls are allowed, please add 2 peers";
+
+  std::unique_ptr<PeerConfigurer> alice_configurer =
+      std::move(peer_configurations_[0]);
+  std::unique_ptr<PeerConfigurer> bob_configurer =
+      std::move(peer_configurations_[1]);
+  peer_configurations_.clear();
+
+  for (size_t i = 0;
+       i < bob_configurer->configurable_params()->video_configs.size(); ++i) {
+    // We support simulcast only from caller.
+    RTC_CHECK(!bob_configurer->configurable_params()
+                   ->video_configs[i]
+                   .simulcast_config)
+        << "Only simulcast stream from first peer is supported";
+  }
+
+  test::ScopedFieldTrials field_trials(GetFieldTrials(run_params));
+
+  // Print test summary
+  RTC_LOG(LS_INFO)
+      << "Media quality test: " << *alice_configurer->params()->name
+      << " will make a call to " << *bob_configurer->params()->name
+      << " with media video="
+      << !alice_configurer->configurable_params()->video_configs.empty()
+      << "; audio=" << alice_configurer->params()->audio_config.has_value()
+      << ". " << *bob_configurer->params()->name
+      << " will respond with media video="
+      << !bob_configurer->configurable_params()->video_configs.empty()
+      << "; audio=" << bob_configurer->params()->audio_config.has_value();
+
+  const std::unique_ptr<rtc::Thread> signaling_thread =
+      time_controller_.CreateThread(kSignalThreadName);
+  media_helper_ = std::make_unique<MediaHelper>(
+      video_quality_analyzer_injection_helper_.get(), task_queue_factory_.get(),
+      time_controller_.GetClock());
+
+  // Create a `task_queue_`.
+  task_queue_ = std::make_unique<webrtc::TaskQueueForTest>(
+      time_controller_.GetTaskQueueFactory()->CreateTaskQueue(
+          "pc_e2e_quality_test", webrtc::TaskQueueFactory::Priority::NORMAL));
+
+  // Create call participants: Alice and Bob.
+  // Audio streams are intercepted in AudioDeviceModule, so if it is required to
+  // catch output of Alice's stream, Alice's output_dump_file_name should be
+  // passed to Bob's TestPeer setup as audio output file name.
+  absl::optional<RemotePeerAudioConfig> alice_remote_audio_config =
+      RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config);
+  absl::optional<RemotePeerAudioConfig> bob_remote_audio_config =
+      RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config);
+  // Copy Alice and Bob video configs, subscriptions and names to correctly pass
+  // them into lambdas.
+  VideoSubscription alice_subscription =
+      alice_configurer->configurable_params()->video_subscription;
+  std::vector<VideoConfig> alice_video_configs =
+      alice_configurer->configurable_params()->video_configs;
+  std::string alice_name = alice_configurer->params()->name.value();
+  // Bug fix: Bob's subscription must come from `bob_configurer` (it was
+  // previously copied from `alice_configurer`, silently replacing Bob's
+  // video subscription with Alice's).
+  VideoSubscription bob_subscription =
+      bob_configurer->configurable_params()->video_subscription;
+  std::vector<VideoConfig> bob_video_configs =
+      bob_configurer->configurable_params()->video_configs;
+  std::string bob_name = bob_configurer->params()->name.value();
+
+  TestPeerFactory test_peer_factory(
+      signaling_thread.get(), time_controller_,
+      video_quality_analyzer_injection_helper_.get(), task_queue_.get());
+  alice_ = test_peer_factory.CreateTestPeer(
+      std::move(alice_configurer),
+      std::make_unique<FixturePeerConnectionObserver>(
+          [this, alice_name, alice_subscription, bob_video_configs](
+              rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+            OnTrackCallback(alice_name, alice_subscription, transceiver,
+                            bob_video_configs);
+          },
+          [this]() { StartVideo(alice_video_sources_); }),
+      alice_remote_audio_config, run_params.echo_emulation_config);
+  bob_ = test_peer_factory.CreateTestPeer(
+      std::move(bob_configurer),
+      std::make_unique<FixturePeerConnectionObserver>(
+          [this, bob_name, bob_subscription, alice_video_configs](
+              rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+            OnTrackCallback(bob_name, bob_subscription, transceiver,
+                            alice_video_configs);
+          },
+          [this]() { StartVideo(bob_video_sources_); }),
+      bob_remote_audio_config, run_params.echo_emulation_config);
+
+  int num_cores = CpuInfo::DetectNumberOfCores();
+  RTC_DCHECK_GE(num_cores, 1);
+
+  // Give the analyzer whatever cores are left over, clamped to [1, max].
+  int video_analyzer_threads =
+      num_cores - kPeerConnectionUsedThreads - kFrameworkUsedThreads;
+  if (video_analyzer_threads <= 0) {
+    video_analyzer_threads = 1;
+  }
+  video_analyzer_threads =
+      std::min(video_analyzer_threads, kMaxVideoAnalyzerThreads);
+  RTC_LOG(LS_INFO) << "video_analyzer_threads=" << video_analyzer_threads;
+  quality_metrics_reporters_.push_back(
+      std::make_unique<VideoQualityMetricsReporter>(time_controller_.GetClock(),
+                                                    metrics_logger_));
+  quality_metrics_reporters_.push_back(
+      std::make_unique<CrossMediaMetricsReporter>(metrics_logger_));
+
+  video_quality_analyzer_injection_helper_->Start(
+      test_case_name_,
+      std::vector<std::string>{alice_->params().name.value(),
+                               bob_->params().name.value()},
+      video_analyzer_threads);
+  audio_quality_analyzer_->Start(test_case_name_, &analyzer_helper_);
+  for (auto& reporter : quality_metrics_reporters_) {
+    reporter->Start(test_case_name_, &analyzer_helper_);
+  }
+
+  // Start RTCEventLog recording if requested.
+  if (alice_->params().rtc_event_log_path) {
+    auto alice_rtc_event_log = std::make_unique<webrtc::RtcEventLogOutputFile>(
+        alice_->params().rtc_event_log_path.value());
+    alice_->pc()->StartRtcEventLog(std::move(alice_rtc_event_log),
+                                   webrtc::RtcEventLog::kImmediateOutput);
+  }
+  if (bob_->params().rtc_event_log_path) {
+    auto bob_rtc_event_log = std::make_unique<webrtc::RtcEventLogOutputFile>(
+        bob_->params().rtc_event_log_path.value());
+    bob_->pc()->StartRtcEventLog(std::move(bob_rtc_event_log),
+                                 webrtc::RtcEventLog::kImmediateOutput);
+  }
+
+  // Setup alive logging. It is done to prevent test infra from thinking that
+  // the test is dead.
+  RepeatingTaskHandle::DelayedStart(task_queue_->Get(),
+                                    kAliveMessageLogInterval, []() {
+                                      std::printf("Test is still running...\n");
+                                      return kAliveMessageLogInterval;
+                                    });
+
+  RTC_LOG(LS_INFO) << "Configuration is done. Now " << *alice_->params().name
+                   << " is calling to " << *bob_->params().name << "...";
+
+  // Setup stats poller.
+  std::vector<StatsObserverInterface*> observers = {
+      audio_quality_analyzer_.get(),
+      video_quality_analyzer_injection_helper_.get()};
+  for (auto& reporter : quality_metrics_reporters_) {
+    observers.push_back(reporter.get());
+  }
+  StatsPoller stats_poller(observers,
+                           std::map<std::string, StatsProvider*>{
+                               {*alice_->params().name, alice_.get()},
+                               {*bob_->params().name, bob_.get()}});
+  executor_->ScheduleActivity(TimeDelta::Zero(), kStatsUpdateInterval,
+                              [&stats_poller](TimeDelta) {
+                                stats_poller.PollStatsAndNotifyObservers();
+                              });
+
+  // Setup call.
+  SendTask(signaling_thread.get(),
+           [this, &run_params] { SetupCallOnSignalingThread(run_params); });
+  std::unique_ptr<SignalingInterceptor> signaling_interceptor =
+      CreateSignalingInterceptor(run_params);
+  // Connect peers.
+  SendTask(signaling_thread.get(), [this, &signaling_interceptor] {
+    ExchangeOfferAnswer(signaling_interceptor.get());
+  });
+  WaitUntilIceCandidatesGathered(signaling_thread.get());
+
+  SendTask(signaling_thread.get(), [this, &signaling_interceptor] {
+    ExchangeIceCandidates(signaling_interceptor.get());
+  });
+  WaitUntilPeersAreConnected(signaling_thread.get());
+
+  executor_->Start(task_queue_.get());
+  Timestamp start_time = Now();
+
+  bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest");
+  if (is_quick_test_enabled) {
+    time_controller_.AdvanceTime(kQuickTestModeRunDuration);
+  } else {
+    time_controller_.AdvanceTime(run_params.run_duration);
+  }
+
+  RTC_LOG(LS_INFO) << "Test is done, initiating disconnect sequence.";
+
+  // Stop all client started tasks to prevent their access to any call related
+  // objects after these objects will be destroyed during call tear down.
+  executor_->Stop();
+  // There is no guarantee, that last stats collection will happen at the end
+  // of the call, so we force it after executor, which is among others is doing
+  // stats collection, was stopped.
+  task_queue_->SendTask([&stats_poller]() {
+    // Get final end-of-call stats.
+    stats_poller.PollStatsAndNotifyObservers();
+  });
+  // We need to detach AEC dumping from peers, because dump uses `task_queue_`
+  // inside.
+  alice_->DetachAecDump();
+  bob_->DetachAecDump();
+  // Tear down the call.
+  SendTask(signaling_thread.get(), [this] { TearDownCallOnSignalingThread(); });
+
+  Timestamp end_time = Now();
+  RTC_LOG(LS_INFO) << "All peers are disconnected.";
+  {
+    MutexLock lock(&lock_);
+    real_test_duration_ = end_time - start_time;
+  }
+
+  ReportGeneralTestResults();
+  audio_quality_analyzer_->Stop();
+  video_quality_analyzer_injection_helper_->Stop();
+  for (auto& reporter : quality_metrics_reporters_) {
+    reporter->StopAndReportResults();
+  }
+
+  // Reset `task_queue_` after test to cleanup.
+  task_queue_.reset();
+
+  alice_ = nullptr;
+  bob_ = nullptr;
+  // Ensuring that TestVideoCapturerVideoTrackSource are destroyed on the right
+  // thread.
+  RTC_CHECK(alice_video_sources_.empty());
+  RTC_CHECK(bob_video_sources_.empty());
+}
+
+// Builds the field-trial string for the run: the process-wide trials plus the
+// framework defaults (standard bytes stats and, when requested via
+// `run_params.enable_flex_fec_support`, the FlexFEC trials).
+std::string PeerConnectionE2EQualityTest::GetFieldTrials(
+    const RunParams& run_params) {
+  std::vector<absl::string_view> default_field_trials = {
+      kUseStandardsBytesStats};
+  if (run_params.enable_flex_fec_support) {
+    default_field_trials.push_back(kFlexFecEnabledFieldTrials);
+  }
+  rtc::StringBuilder sb;
+  sb << field_trial::GetFieldTrialString();
+  for (const absl::string_view& field_trial : default_field_trials) {
+    sb << field_trial;
+  }
+  return sb.Release();
+}
+
+// Invoked on the receiving side when a remote track is added: registers the
+// track in the analyzer helper and, for video tracks, attaches an analyzing
+// video sink owned by `output_video_sinks_`.
+void PeerConnectionE2EQualityTest::OnTrackCallback(
+    absl::string_view peer_name,
+    VideoSubscription peer_subscription,
+    rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
+    std::vector<VideoConfig> remote_video_configs) {
+  const rtc::scoped_refptr<MediaStreamTrackInterface>& track =
+      transceiver->receiver()->track();
+  RTC_CHECK_EQ(transceiver->receiver()->stream_ids().size(), 2)
+      << "Expected 2 stream ids: 1st - sync group, 2nd - unique stream label";
+  std::string sync_group = transceiver->receiver()->stream_ids()[0];
+  std::string stream_label = transceiver->receiver()->stream_ids()[1];
+  analyzer_helper_.AddTrackToStreamMapping(track->id(), peer_name, stream_label,
+                                           sync_group);
+  if (track->kind() != MediaStreamTrackInterface::kVideoKind) {
+    return;
+  }
+
+  // It is safe to cast here, because it is checked above that
+  // track->kind() is kVideoKind.
+  auto* video_track = static_cast<VideoTrackInterface*>(track.get());
+  std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>> video_sink =
+      video_quality_analyzer_injection_helper_->CreateVideoSink(
+          peer_name, peer_subscription, /*report_infra_stats=*/false);
+  video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants());
+  output_video_sinks_.push_back(std::move(video_sink));
+}
+
+// Runs on the signaling thread: creates Alice's transceivers (send, simulcast
+// or receive-only as needed), adds media tracks for both peers and applies
+// codec preferences. Must be called before SDP negotiation.
+void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread(
+    const RunParams& run_params) {
+  // We need receive-only transceivers for Bob's media stream, so there will
+  // be media section in SDP for that streams in Alice's offer, because it is
+  // forbidden to add new media sections in answer in Unified Plan.
+  RtpTransceiverInit receive_only_transceiver_init;
+  receive_only_transceiver_init.direction = RtpTransceiverDirection::kRecvOnly;
+  int alice_transceivers_counter = 0;
+  if (bob_->params().audio_config) {
+    // Setup receive audio transceiver if Bob has audio to send. If we'll need
+    // multiple audio streams, then we need transceiver for each Bob's audio
+    // stream.
+    RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+        alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_AUDIO,
+                               receive_only_transceiver_init);
+    RTC_CHECK(result.ok());
+    alice_transceivers_counter++;
+  }
+
+  size_t alice_video_transceivers_non_simulcast_counter = 0;
+  for (auto& video_config : alice_->configurable_params().video_configs) {
+    RtpTransceiverInit transceiver_params;
+    if (video_config.simulcast_config) {
+      transceiver_params.direction = RtpTransceiverDirection::kSendOnly;
+      // Because simulcast enabled `alice_->params().video_codecs` has only 1
+      // element.
+      if (alice_->params().video_codecs[0].name == cricket::kVp8CodecName) {
+        // For Vp8 simulcast we need to add as many RtpEncodingParameters to the
+        // track as many simulcast streams requested. If they specified in
+        // `video_config.simulcast_config` it should be copied from there.
+        for (int i = 0;
+             i < video_config.simulcast_config->simulcast_streams_count; ++i) {
+          RtpEncodingParameters enc_params;
+          if (!video_config.encoding_params.empty()) {
+            enc_params = video_config.encoding_params[i];
+          }
+          // We need to be sure, that all rids will be unique with all mids.
+          enc_params.rid = std::to_string(alice_transceivers_counter) + "000" +
+                           std::to_string(i);
+          transceiver_params.send_encodings.push_back(enc_params);
+        }
+      }
+    } else {
+      transceiver_params.direction = RtpTransceiverDirection::kSendRecv;
+      RtpEncodingParameters enc_params;
+      if (video_config.encoding_params.size() == 1) {
+        enc_params = video_config.encoding_params[0];
+      }
+      transceiver_params.send_encodings.push_back(enc_params);
+
+      alice_video_transceivers_non_simulcast_counter++;
+    }
+    RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+        alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO,
+                               transceiver_params);
+    RTC_CHECK(result.ok());
+
+    alice_transceivers_counter++;
+  }
+
+  // Add receive only transceivers in case Bob has more video_configs than
+  // Alice.
+  for (size_t i = alice_video_transceivers_non_simulcast_counter;
+       i < bob_->configurable_params().video_configs.size(); ++i) {
+    RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+        alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO,
+                               receive_only_transceiver_init);
+    RTC_CHECK(result.ok());
+    alice_transceivers_counter++;
+  }
+
+  // Then add media for Alice and Bob
+  media_helper_->MaybeAddAudio(alice_.get());
+  alice_video_sources_ = media_helper_->MaybeAddVideo(alice_.get());
+  media_helper_->MaybeAddAudio(bob_.get());
+  bob_video_sources_ = media_helper_->MaybeAddVideo(bob_.get());
+
+  SetPeerCodecPreferences(alice_.get());
+  SetPeerCodecPreferences(bob_.get());
+}
+
+// Signaling-thread entry point for call tear-down; delegates to TearDownCall.
+void PeerConnectionE2EQualityTest::TearDownCallOnSignalingThread() {
+  TearDownCall();
+}
+
+// Applies codec preferences to every video transceiver of `peer`: simulcast
+// transceivers (more than one send encoding) get the RTX-free capability list,
+// all others get the list with RTX included.
+void PeerConnectionE2EQualityTest::SetPeerCodecPreferences(TestPeer* peer) {
+  std::vector<RtpCodecCapability> with_rtx_video_capabilities =
+      FilterVideoCodecCapabilities(
+          peer->params().video_codecs, true, peer->params().use_ulp_fec,
+          peer->params().use_flex_fec,
+          peer->pc_factory()
+              ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
+              .codecs);
+  std::vector<RtpCodecCapability> without_rtx_video_capabilities =
+      FilterVideoCodecCapabilities(
+          peer->params().video_codecs, false, peer->params().use_ulp_fec,
+          peer->params().use_flex_fec,
+          peer->pc_factory()
+              ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO)
+              .codecs);
+
+  // Set codecs for transceivers
+  for (auto transceiver : peer->pc()->GetTransceivers()) {
+    if (transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) {
+      if (transceiver->sender()->init_send_encodings().size() > 1) {
+        // If transceiver's sender has more than 1 send encodings, it means it
+        // has multiple simulcast streams, so we need disable RTX on it.
+        RTCError result =
+            transceiver->SetCodecPreferences(without_rtx_video_capabilities);
+        RTC_CHECK(result.ok());
+      } else {
+        RTCError result =
+            transceiver->SetCodecPreferences(with_rtx_video_capabilities);
+        RTC_CHECK(result.ok());
+      }
+    }
+  }
+}
+
+// Builds the SDP/ICE interceptor used to patch offers/answers, seeded with
+// Alice's simulcast stream counts (simulcast/SVC is only supported from the
+// first peer) and the conference-mode flag.
+std::unique_ptr<SignalingInterceptor>
+PeerConnectionE2EQualityTest::CreateSignalingInterceptor(
+    const RunParams& run_params) {
+  std::map<std::string, int> stream_label_to_simulcast_streams_count;
+  // We add only Alice here, because simulcast/svc is supported only from the
+  // first peer.
+  for (auto& video_config : alice_->configurable_params().video_configs) {
+    if (video_config.simulcast_config) {
+      stream_label_to_simulcast_streams_count.insert(
+          {*video_config.stream_label,
+           video_config.simulcast_config->simulcast_streams_count});
+    }
+  }
+  PatchingParams patching_params(run_params.use_conference_mode,
+                                 stream_label_to_simulcast_streams_count);
+  return std::make_unique<SignalingInterceptor>(patching_params);
+}
+
+// Blocks (advancing simulated time) until both peers report ICE gathering
+// done; fails the test via ASSERT after 2x kDefaultTimeout.
+void PeerConnectionE2EQualityTest::WaitUntilIceCandidatesGathered(
+    rtc::Thread* signaling_thread) {
+  ASSERT_TRUE(time_controller_.Wait(
+      [&]() {
+        bool result;
+        // Query both peers on the signaling thread where they live.
+        SendTask(signaling_thread, [&]() {
+          result = alice_->IsIceGatheringDone() && bob_->IsIceGatheringDone();
+        });
+        return result;
+      },
+      2 * kDefaultTimeout));
+}
+
+// Waits for ICE connectivity on each peer and records the outcomes in
+// `alice_connected_` / `bob_connected_`; these flags are later reported as
+// metrics instead of failing the test immediately.
+void PeerConnectionE2EQualityTest::WaitUntilPeersAreConnected(
+    rtc::Thread* signaling_thread) {
+  // This means that ICE and DTLS are connected.
+  alice_connected_ = time_controller_.Wait(
+      [&]() {
+        bool result;
+        SendTask(signaling_thread, [&] { result = alice_->IsIceConnected(); });
+        return result;
+      },
+      kDefaultTimeout);
+  bob_connected_ = time_controller_.Wait(
+      [&]() {
+        bool result;
+        SendTask(signaling_thread, [&] { result = bob_->IsIceConnected(); });
+        return result;
+      },
+      kDefaultTimeout);
+}
+
+// Performs the offer/answer exchange between Alice (offerer) and Bob
+// (answerer); each SDP is run through the interceptor's patching before being
+// applied, and every intermediate SDP is logged.
+void PeerConnectionE2EQualityTest::ExchangeOfferAnswer(
+    SignalingInterceptor* signaling_interceptor) {
+  std::string log_output;
+
+  auto offer = alice_->CreateOffer();
+  RTC_CHECK(offer);
+  offer->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Original offer: " << log_output;
+  LocalAndRemoteSdp patch_result = signaling_interceptor->PatchOffer(
+      std::move(offer), alice_->params().video_codecs[0]);
+  patch_result.local_sdp->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Offer to set as local description: " << log_output;
+  patch_result.remote_sdp->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Offer to set as remote description: " << log_output;
+
+  bool set_local_offer =
+      alice_->SetLocalDescription(std::move(patch_result.local_sdp));
+  RTC_CHECK(set_local_offer);
+  bool set_remote_offer =
+      bob_->SetRemoteDescription(std::move(patch_result.remote_sdp));
+  RTC_CHECK(set_remote_offer);
+  auto answer = bob_->CreateAnswer();
+  RTC_CHECK(answer);
+  answer->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Original answer: " << log_output;
+  patch_result = signaling_interceptor->PatchAnswer(
+      std::move(answer), bob_->params().video_codecs[0]);
+  patch_result.local_sdp->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Answer to set as local description: " << log_output;
+  patch_result.remote_sdp->ToString(&log_output);
+  RTC_LOG(LS_INFO) << "Answer to set as remote description: " << log_output;
+
+  bool set_local_answer =
+      bob_->SetLocalDescription(std::move(patch_result.local_sdp));
+  RTC_CHECK(set_local_answer);
+  bool set_remote_answer =
+      alice_->SetRemoteDescription(std::move(patch_result.remote_sdp));
+  RTC_CHECK(set_remote_answer);
+}
+
+// Exchanges gathered ICE candidates between the peers, patching them through
+// the interceptor and logging each candidate for debugging.
+void PeerConnectionE2EQualityTest::ExchangeIceCandidates(
+    SignalingInterceptor* signaling_interceptor) {
+  // Connect ICE candidate pairs.
+  std::vector<std::unique_ptr<IceCandidateInterface>> alice_candidates =
+      signaling_interceptor->PatchOffererIceCandidates(
+          alice_->observer()->GetAllCandidates());
+  for (auto& candidate : alice_candidates) {
+    std::string candidate_str;
+    RTC_CHECK(candidate->ToString(&candidate_str));
+    RTC_LOG(LS_INFO) << *alice_->params().name
+                     << " ICE candidate(mid= " << candidate->sdp_mid()
+                     << "): " << candidate_str;
+  }
+  ASSERT_TRUE(bob_->AddIceCandidates(std::move(alice_candidates)));
+  std::vector<std::unique_ptr<IceCandidateInterface>> bob_candidates =
+      signaling_interceptor->PatchAnswererIceCandidates(
+          bob_->observer()->GetAllCandidates());
+  for (auto& candidate : bob_candidates) {
+    std::string candidate_str;
+    RTC_CHECK(candidate->ToString(&candidate_str));
+    RTC_LOG(LS_INFO) << *bob_->params().name
+                     << " ICE candidate(mid= " << candidate->sdp_mid()
+                     << "): " << candidate_str;
+  }
+  ASSERT_TRUE(alice_->AddIceCandidates(std::move(bob_candidates)));
+}
+
+// Starts every capturer source that is not already live; called once a peer
+// connection reaches the connected state.
+void PeerConnectionE2EQualityTest::StartVideo(
+    const std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>&
+        sources) {
+  for (auto& source : sources) {
+    if (source->state() != MediaSourceInterface::SourceState::kLive) {
+      source->Start();
+    }
+  }
+}
+
+// Stops all video sources, closes both peer connections and releases the
+// media helper. Runs on the signaling thread (see
+// TearDownCallOnSignalingThread).
+void PeerConnectionE2EQualityTest::TearDownCall() {
+  for (const auto& video_source : alice_video_sources_) {
+    video_source->Stop();
+  }
+  for (const auto& video_source : bob_video_sources_) {
+    video_source->Stop();
+  }
+
+  alice_video_sources_.clear();
+  bob_video_sources_.clear();
+
+  alice_->Close();
+  bob_->Close();
+
+  media_helper_ = nullptr;
+}
+
+// Logs one "<peer>_connected" boolean metric per peer, recording whether each
+// side reached ICE-connected state within the timeout.
+void PeerConnectionE2EQualityTest::ReportGeneralTestResults() {
+  // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+  metrics_logger_->LogSingleValueMetric(
+      *alice_->params().name + "_connected", test_case_name_, alice_connected_,
+      Unit::kUnitless, ImprovementDirection::kBiggerIsBetter,
+      {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name},
+       {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}});
+  // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+  metrics_logger_->LogSingleValueMetric(
+      *bob_->params().name + "_connected", test_case_name_, bob_connected_,
+      Unit::kUnitless, ImprovementDirection::kBiggerIsBetter,
+      {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name},
+       {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}});
+}
+
+// Returns the current time from the (possibly simulated) clock.
+Timestamp PeerConnectionE2EQualityTest::Now() const {
+  return time_controller_.GetClock()->CurrentTime();
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h
new file mode 100644
index 0000000000..6cbf232874
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_H_
+#define TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_H_
+
+#include <memory>
+#include <queue>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/audio_quality_analyzer_interface.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/test/time_controller.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "system_wrappers/include/clock.h"
+#include "test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h"
+#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
+#include "test/pc/e2e/analyzer_helper.h"
+#include "test/pc/e2e/media/media_helper.h"
+#include "test/pc/e2e/sdp/sdp_changer.h"
+#include "test/pc/e2e/test_activities_executor.h"
+#include "test/pc/e2e/test_peer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+class PeerConnectionE2EQualityTest
+ : public PeerConnectionE2EQualityTestFixture {
+ public:
+ using QualityMetricsReporter =
+ PeerConnectionE2EQualityTestFixture::QualityMetricsReporter;
+
+ PeerConnectionE2EQualityTest(
+ std::string test_case_name,
+ TimeController& time_controller,
+ std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
+ std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer);
+ PeerConnectionE2EQualityTest(
+ std::string test_case_name,
+ TimeController& time_controller,
+ std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
+ std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer,
+ test::MetricsLogger* metrics_logger);
+
+ ~PeerConnectionE2EQualityTest() override = default;
+
+ void ExecuteAt(TimeDelta target_time_since_start,
+ std::function<void(TimeDelta)> func) override;
+ void ExecuteEvery(TimeDelta initial_delay_since_start,
+ TimeDelta interval,
+ std::function<void(TimeDelta)> func) override;
+
+ void AddQualityMetricsReporter(std::unique_ptr<QualityMetricsReporter>
+ quality_metrics_reporter) override;
+
+ PeerHandle* AddPeer(std::unique_ptr<PeerConfigurer> configurer) override;
+ void Run(RunParams run_params) override;
+
+ TimeDelta GetRealTestDuration() const override {
+ MutexLock lock(&lock_);
+ RTC_CHECK_NE(real_test_duration_, TimeDelta::Zero());
+ return real_test_duration_;
+ }
+
+ private:
+ class PeerHandleImpl : public PeerHandle {
+ public:
+ ~PeerHandleImpl() override = default;
+ };
+
+ // For some functionality some field trials have to be enabled, they will be
+ // enabled in Run().
+ std::string GetFieldTrials(const RunParams& run_params);
+ void OnTrackCallback(absl::string_view peer_name,
+ VideoSubscription peer_subscription,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver,
+ std::vector<VideoConfig> remote_video_configs);
+ // Have to be run on the signaling thread.
+ void SetupCallOnSignalingThread(const RunParams& run_params);
+ void TearDownCallOnSignalingThread();
+ void SetPeerCodecPreferences(TestPeer* peer);
+ std::unique_ptr<SignalingInterceptor> CreateSignalingInterceptor(
+ const RunParams& run_params);
+ void WaitUntilIceCandidatesGathered(rtc::Thread* signaling_thread);
+ void WaitUntilPeersAreConnected(rtc::Thread* signaling_thread);
+ void ExchangeOfferAnswer(SignalingInterceptor* signaling_interceptor);
+ void ExchangeIceCandidates(SignalingInterceptor* signaling_interceptor);
+ void StartVideo(
+ const std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>&
+ sources);
+ void TearDownCall();
+ void ReportGeneralTestResults();
+ Timestamp Now() const;
+
+ TimeController& time_controller_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ std::string test_case_name_;
+ std::unique_ptr<VideoQualityAnalyzerInjectionHelper>
+ video_quality_analyzer_injection_helper_;
+ std::unique_ptr<MediaHelper> media_helper_;
+ std::unique_ptr<EncodedImageDataPropagator> encoded_image_data_propagator_;
+ std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer_;
+ std::unique_ptr<TestActivitiesExecutor> executor_;
+ test::MetricsLogger* const metrics_logger_;
+
+ std::vector<std::unique_ptr<PeerConfigurer>> peer_configurations_;
+ std::vector<PeerHandleImpl> peer_handles_;
+
+ std::unique_ptr<TestPeer> alice_;
+ std::unique_ptr<TestPeer> bob_;
+ std::vector<std::unique_ptr<QualityMetricsReporter>>
+ quality_metrics_reporters_;
+
+ std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>
+ alice_video_sources_;
+ std::vector<rtc::scoped_refptr<TestVideoCapturerVideoTrackSource>>
+ bob_video_sources_;
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
+ output_video_sinks_;
+ AnalyzerHelper analyzer_helper_;
+
+ mutable Mutex lock_;
+ TimeDelta real_test_duration_ RTC_GUARDED_BY(lock_) = TimeDelta::Zero();
+
+ // Task queue, that is used for running activities during test call.
+ // This task queue will be created before call set up and will be destroyed
+ // immediately before call tear down.
+ std::unique_ptr<TaskQueueForTest> task_queue_;
+
+ bool alice_connected_ = false;
+ bool bob_connected_ = false;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_PEER_CONNECTION_QUALITY_TEST_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc
new file mode 100644
index 0000000000..8a47e108e0
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc
@@ -0,0 +1,1102 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/metrics/stdout_metrics_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/time_delta.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+#include "test/pc/e2e/peer_connection_quality_test.h"
+#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::UnorderedElementsAre;
+
+using ::webrtc::test::DefaultMetricsLogger;
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Metric;
+using ::webrtc::test::MetricsExporter;
+using ::webrtc::test::StdoutMetricsExporter;
+using ::webrtc::test::Unit;
+using ::webrtc::webrtc_pc_e2e::PeerConfigurer;
+
+// Adds a peer with some audio and video (the client should not care about
+// details about audio and video configs).
+void AddDefaultAudioVideoPeer(
+ absl::string_view peer_name,
+ absl::string_view audio_stream_label,
+ absl::string_view video_stream_label,
+ const PeerNetworkDependencies& network_dependencies,
+ PeerConnectionE2EQualityTestFixture& fixture) {
+ AudioConfig audio{std::string(audio_stream_label)};
+ audio.sync_group = std::string(peer_name);
+ VideoConfig video(std::string(video_stream_label), 320, 180, 15);
+ video.sync_group = std::string(peer_name);
+ auto peer = std::make_unique<PeerConfigurer>(network_dependencies);
+ peer->SetName(peer_name);
+ peer->SetAudioConfig(std::move(audio));
+ peer->AddVideoConfig(std::move(video));
+ peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)});
+ fixture.AddPeer(std::move(peer));
+}
+
+// Metric fields to assert on
+struct MetricValidationInfo {
+ std::string test_case;
+ std::string name;
+ Unit unit;
+ ImprovementDirection improvement_direction;
+ std::map<std::string, std::string> metadata;
+};
+
+bool operator==(const MetricValidationInfo& a, const MetricValidationInfo& b) {
+ return a.name == b.name && a.test_case == b.test_case && a.unit == b.unit &&
+ a.improvement_direction == b.improvement_direction &&
+ a.metadata == b.metadata;
+}
+
+std::ostream& operator<<(std::ostream& os, const MetricValidationInfo& m) {
+ os << "{ test_case=" << m.test_case << "; name=" << m.name
+ << "; unit=" << test::ToString(m.unit)
+ << "; improvement_direction=" << test::ToString(m.improvement_direction)
+ << "; metadata={ ";
+ for (const auto& [key, value] : m.metadata) {
+ os << "{ key=" << key << "; value=" << value << " }";
+ }
+ os << " }}";
+ return os;
+}
+
+std::vector<MetricValidationInfo> ToValidationInfo(
+ const std::vector<Metric>& metrics) {
+ std::vector<MetricValidationInfo> out;
+ for (const Metric& m : metrics) {
+ out.push_back(
+ MetricValidationInfo{.test_case = m.test_case,
+ .name = m.name,
+ .unit = m.unit,
+ .improvement_direction = m.improvement_direction,
+ .metadata = m.metric_metadata});
+ }
+ return out;
+}
+
+TEST(PeerConnectionE2EQualityTestMetricNamesTest,
+ ExportedMetricsHasCorrectNamesAndAnnotation) {
+ std::unique_ptr<NetworkEmulationManager> network_emulation =
+ CreateNetworkEmulationManager(TimeMode::kSimulated);
+ DefaultMetricsLogger metrics_logger(
+ network_emulation->time_controller()->GetClock());
+ PeerConnectionE2EQualityTest fixture(
+ "test_case", *network_emulation->time_controller(),
+ /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr,
+ &metrics_logger);
+
+ EmulatedEndpoint* alice_endpoint =
+ network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpoint* bob_endpoint =
+ network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+
+ network_emulation->CreateRoute(
+ alice_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()},
+ bob_endpoint);
+ network_emulation->CreateRoute(
+ bob_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()},
+ alice_endpoint);
+
+ EmulatedNetworkManagerInterface* alice_network =
+ network_emulation->CreateEmulatedNetworkManagerInterface(
+ {alice_endpoint});
+ EmulatedNetworkManagerInterface* bob_network =
+ network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint});
+
+ AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video",
+ alice_network->network_dependencies(), fixture);
+ AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video",
+ bob_network->network_dependencies(), fixture);
+ fixture.AddQualityMetricsReporter(
+ std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+ std::map<std::string, std::vector<EmulatedEndpoint*>>(
+ {{"alice", alice_network->endpoints()},
+ {"bob", bob_network->endpoints()}}),
+ network_emulation.get(), &metrics_logger));
+
+ // Run for at least 7 seconds, so AV-sync metrics will be collected.
+ fixture.Run(RunParams(TimeDelta::Seconds(7)));
+
+ std::vector<MetricValidationInfo> metrics =
+ ToValidationInfo(metrics_logger.GetCollectedMetrics());
+ EXPECT_THAT(
+ metrics,
+ UnorderedElementsAre(
+ // Metrics from PeerConnectionE2EQualityTest
+ MetricValidationInfo{
+ .test_case = "test_case",
+ .name = "alice_connected",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case",
+ .name = "bob_connected",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+
+ // Metrics from DefaultAudioQualityAnalyzer
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "expand_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "accelerate_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "preemptive_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "speech_expand_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "average_jitter_buffer_delay_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_audio",
+ .name = "preferred_buffer_size_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "expand_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "accelerate_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "preemptive_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "speech_expand_rate",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "average_jitter_buffer_delay_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_audio",
+ .name = "preferred_buffer_size_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey,
+ "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+
+ // Metrics from DefaultVideoQualityAnalyzer
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "ssim",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "transport_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "total_delay_incl_transport",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "time_between_rendered_frames",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "harmonic_framerate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "encode_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "encode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "time_between_freezes",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "freeze_time_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "pixels_per_frame",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "min_psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "decode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "receive_to_render_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "dropped_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "frames_in_flight",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "rendered_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "max_skipped",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "target_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "qp_sl0",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kSpatialLayerMetadataKey, "0"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_video",
+ .name = "actual_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "alice_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "ssim",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "transport_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "total_delay_incl_transport",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "time_between_rendered_frames",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "harmonic_framerate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "encode_frame_rate",
+ .unit = Unit::kHertz,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "encode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "time_between_freezes",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "freeze_time_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "pixels_per_frame",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "min_psnr_dB",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "decode_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "receive_to_render_time",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "dropped_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "frames_in_flight",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "rendered_frames",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kBiggerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "max_skipped",
+ .unit = Unit::kCount,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "target_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "actual_encode_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_video",
+ .name = "qp_sl0",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kVideoStreamMetadataKey,
+ "bob_video"},
+ {MetricMetadataKey::kSenderMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kSpatialLayerMetadataKey, "0"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case",
+ .name = "cpu_usage_%",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {{MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+
+ // Metrics from StatsBasedNetworkQualityMetricsReporter
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "bytes_discarded_no_receiver",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "packets_discarded_no_receiver",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "payload_bytes_received",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "payload_bytes_sent",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "bytes_sent",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "packets_sent",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "average_send_rate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "bytes_received",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "packets_received",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "average_receive_rate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "sent_packets_loss",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "bytes_discarded_no_receiver",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "packets_discarded_no_receiver",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "payload_bytes_received",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "payload_bytes_sent",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "bytes_sent",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "packets_sent",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "average_send_rate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "bytes_received",
+ .unit = Unit::kBytes,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "packets_received",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "average_receive_rate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "sent_packets_loss",
+ .unit = Unit::kUnitless,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+
+ // Metrics from VideoQualityMetricsReporter
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "available_send_bandwidth",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "transmission_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice",
+ .name = "retransmission_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "available_send_bandwidth",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "transmission_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob",
+ .name = "retransmission_bitrate",
+ .unit = Unit::kKilobitsPerSecond,
+ .improvement_direction = ImprovementDirection::kNeitherIsBetter,
+ .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+
+ // Metrics from CrossMediaMetricsReporter
+ MetricValidationInfo{
+ .test_case = "test_case/alice_alice_audio",
+ .name = "audio_ahead_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata =
+ {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/alice_alice_video",
+ .name = "video_ahead_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata =
+ {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_video"},
+ {MetricMetadataKey::kPeerMetadataKey, "bob"},
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"},
+ {MetricMetadataKey::kReceiverMetadataKey, "bob"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_bob_audio",
+ .name = "audio_ahead_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata =
+ {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}},
+ MetricValidationInfo{
+ .test_case = "test_case/bob_bob_video",
+ .name = "video_ahead_ms",
+ .unit = Unit::kMilliseconds,
+ .improvement_direction = ImprovementDirection::kSmallerIsBetter,
+ .metadata = {
+ {MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"},
+ {MetricMetadataKey::kPeerMetadataKey, "alice"},
+ {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"},
+ {MetricMetadataKey::kReceiverMetadataKey, "alice"},
+ {MetricMetadataKey::kExperimentalTestNameMetadataKey,
+ "test_case"}}}));
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc
new file mode 100644
index 0000000000..066fe7d8ee
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_test.cc
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/peer_connection_quality_test.h"
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/time_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Test;
+
+using ::webrtc::webrtc_pc_e2e::PeerConfigurer;
+
+// Remove files and directories in a directory non-recursively.
+void CleanDir(absl::string_view dir, size_t expected_output_files_count) {
+ absl::optional<std::vector<std::string>> dir_content =
+ test::ReadDirectory(dir);
+ if (expected_output_files_count == 0) {
+ ASSERT_FALSE(dir_content.has_value()) << "Empty directory is expected";
+ } else {
+ ASSERT_TRUE(dir_content.has_value()) << "Test directory is empty!";
+ EXPECT_EQ(dir_content->size(), expected_output_files_count);
+ for (const auto& entry : *dir_content) {
+ if (test::DirExists(entry)) {
+ EXPECT_TRUE(test::RemoveDir(entry))
+ << "Failed to remove sub directory: " << entry;
+ } else if (test::FileExists(entry)) {
+ EXPECT_TRUE(test::RemoveFile(entry))
+ << "Failed to remove file: " << entry;
+ } else {
+ FAIL() << "Can't remove unknown file type: " << entry;
+ }
+ }
+ }
+ EXPECT_TRUE(test::RemoveDir(dir)) << "Failed to remove directory: " << dir;
+}
+
+class PeerConnectionE2EQualityTestTest : public Test {
+ protected:
+ ~PeerConnectionE2EQualityTestTest() override = default;
+
+ void SetUp() override {
+ // Create an empty temporary directory for this test.
+ test_directory_ = test::JoinFilename(
+ test::OutputPath(),
+ "TestDir_PeerConnectionE2EQualityTestTest_" +
+ std::string(
+ testing::UnitTest::GetInstance()->current_test_info()->name()));
+ test::CreateDir(test_directory_);
+ }
+
+ void TearDown() override {
+ CleanDir(test_directory_, expected_output_files_count_);
+ }
+
+ void ExpectOutputFilesCount(size_t count) {
+ expected_output_files_count_ = count;
+ }
+
+ std::string test_directory_;
+ size_t expected_output_files_count_ = 0;
+};
+
+TEST_F(PeerConnectionE2EQualityTestTest, OutputVideoIsDumpedWhenRequested) {
+ std::unique_ptr<NetworkEmulationManager> network_emulation =
+ CreateNetworkEmulationManager(TimeMode::kSimulated);
+ PeerConnectionE2EQualityTest fixture(
+ "test_case", *network_emulation->time_controller(),
+ /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr,
+ test::GetGlobalMetricsLogger());
+
+ EmulatedEndpoint* alice_endpoint =
+ network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+ EmulatedEndpoint* bob_endpoint =
+ network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+
+ network_emulation->CreateRoute(
+ alice_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()},
+ bob_endpoint);
+ network_emulation->CreateRoute(
+ bob_endpoint, {network_emulation->CreateUnconstrainedEmulatedNode()},
+ alice_endpoint);
+
+ EmulatedNetworkManagerInterface* alice_network =
+ network_emulation->CreateEmulatedNetworkManagerInterface(
+ {alice_endpoint});
+ EmulatedNetworkManagerInterface* bob_network =
+ network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint});
+
+ VideoConfig alice_video("alice_video", 320, 180, 15);
+ alice_video.output_dump_options = VideoDumpOptions(test_directory_);
+ PeerConfigurer alice(alice_network->network_dependencies());
+ alice.SetName("alice");
+ alice.AddVideoConfig(std::move(alice_video));
+ fixture.AddPeer(std::make_unique<PeerConfigurer>(std::move(alice)));
+
+ PeerConfigurer bob(bob_network->network_dependencies());
+ bob.SetName("bob");
+ fixture.AddPeer(std::make_unique<PeerConfigurer>(std::move(bob)));
+
+ fixture.Run(RunParams(TimeDelta::Seconds(2)));
+
+ auto frame_reader = test::CreateY4mFrameReader(
+ test::JoinFilename(test_directory_, "alice_video_bob_320x180_15.y4m"));
+ EXPECT_THAT(frame_reader->num_frames(), Eq(31)); // 2 seconds 15 fps + 1
+
+ ExpectOutputFilesCount(1);
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.cc b/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.cc
new file mode 100644
index 0000000000..05372125d2
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.cc
@@ -0,0 +1,217 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/peer_params_preprocessor.h"
+
+#include <set>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peer_network_dependencies.h"
+#include "modules/video_coding/svc/create_scalability_structure.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "rtc_base/arraysize.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// List of default names of generic participants according to
+// https://en.wikipedia.org/wiki/Alice_and_Bob
+constexpr absl::string_view kDefaultNames[] = {"alice", "bob", "charlie",
+ "david", "erin", "frank"};
+
+} // namespace
+
+class PeerParamsPreprocessor::DefaultNamesProvider {
+ public:
+ // Caller have to ensure that default names array will outlive names provider
+ // instance.
+ explicit DefaultNamesProvider(
+ absl::string_view prefix,
+ rtc::ArrayView<const absl::string_view> default_names = {})
+ : prefix_(prefix), default_names_(default_names) {}
+
+ void MaybeSetName(absl::optional<std::string>& name) {
+ if (name.has_value()) {
+ known_names_.insert(name.value());
+ } else {
+ name = GenerateName();
+ }
+ }
+
+ private:
+ std::string GenerateName() {
+ std::string name;
+ do {
+ name = GenerateNameInternal();
+ } while (!known_names_.insert(name).second);
+ return name;
+ }
+
+ std::string GenerateNameInternal() {
+ if (counter_ < default_names_.size()) {
+ return std::string(default_names_[counter_++]);
+ }
+ return prefix_ + std::to_string(counter_++);
+ }
+
+ const std::string prefix_;
+ const rtc::ArrayView<const absl::string_view> default_names_;
+
+ std::set<std::string> known_names_;
+ size_t counter_ = 0;
+};
+
+PeerParamsPreprocessor::PeerParamsPreprocessor()
+ : peer_names_provider_(
+ std::make_unique<DefaultNamesProvider>("peer_", kDefaultNames)) {}
+PeerParamsPreprocessor::~PeerParamsPreprocessor() = default;
+
+void PeerParamsPreprocessor::SetDefaultValuesForMissingParams(
+ PeerConfigurer& peer) {
+ Params* params = peer.params();
+ ConfigurableParams* configurable_params = peer.configurable_params();
+ peer_names_provider_->MaybeSetName(params->name);
+ DefaultNamesProvider video_stream_names_provider(*params->name +
+ "_auto_video_stream_label_");
+ for (VideoConfig& config : configurable_params->video_configs) {
+ video_stream_names_provider.MaybeSetName(config.stream_label);
+ }
+ if (params->audio_config) {
+ DefaultNamesProvider audio_stream_names_provider(
+ *params->name + "_auto_audio_stream_label_");
+ audio_stream_names_provider.MaybeSetName(
+ params->audio_config->stream_label);
+ }
+
+ if (params->video_codecs.empty()) {
+ params->video_codecs.push_back(VideoCodecConfig(cricket::kVp8CodecName));
+ }
+}
+
+void PeerParamsPreprocessor::ValidateParams(const PeerConfigurer& peer) {
+ const Params& p = peer.params();
+ RTC_CHECK_GT(p.video_encoder_bitrate_multiplier, 0.0);
+ // Each peer should at least support 1 video codec.
+ RTC_CHECK_GE(p.video_codecs.size(), 1);
+
+ {
+ RTC_CHECK(p.name);
+ bool inserted = peer_names_.insert(p.name.value()).second;
+ RTC_CHECK(inserted) << "Duplicate name=" << p.name.value();
+ }
+
+ // Validate that all video stream labels are unique and sync groups are
+ // valid.
+ for (const VideoConfig& video_config :
+ peer.configurable_params().video_configs) {
+ RTC_CHECK(video_config.stream_label);
+ bool inserted =
+ video_labels_.insert(video_config.stream_label.value()).second;
+ RTC_CHECK(inserted) << "Duplicate video_config.stream_label="
+ << video_config.stream_label.value();
+
+ // TODO(bugs.webrtc.org/4762): remove this check after synchronization of
+ // more than two streams is supported.
+ if (video_config.sync_group.has_value()) {
+ bool sync_group_inserted =
+ video_sync_groups_.insert(video_config.sync_group.value()).second;
+ RTC_CHECK(sync_group_inserted)
+ << "Sync group shouldn't consist of more than two streams (one "
+ "video and one audio). Duplicate video_config.sync_group="
+ << video_config.sync_group.value();
+ }
+
+ if (video_config.simulcast_config) {
+ if (!video_config.encoding_params.empty()) {
+ RTC_CHECK_EQ(video_config.simulcast_config->simulcast_streams_count,
+ video_config.encoding_params.size())
+ << "|encoding_params| have to be specified for each simulcast "
+ << "stream in |video_config|.";
+ }
+ } else {
+ RTC_CHECK_LE(video_config.encoding_params.size(), 1)
+ << "|encoding_params| has multiple values but simulcast is not "
+ "enabled.";
+ }
+
+ if (video_config.emulated_sfu_config) {
+ if (video_config.simulcast_config &&
+ video_config.emulated_sfu_config->target_layer_index) {
+ RTC_CHECK_LT(*video_config.emulated_sfu_config->target_layer_index,
+ video_config.simulcast_config->simulcast_streams_count);
+ }
+ if (!video_config.encoding_params.empty()) {
+ bool is_svc = false;
+ for (const auto& encoding_param : video_config.encoding_params) {
+ if (!encoding_param.scalability_mode)
+ continue;
+
+ absl::optional<ScalabilityMode> scalability_mode =
+ ScalabilityModeFromString(*encoding_param.scalability_mode);
+ RTC_CHECK(scalability_mode) << "Unknown scalability_mode requested";
+
+ absl::optional<ScalableVideoController::StreamLayersConfig>
+ stream_layers_config =
+ ScalabilityStructureConfig(*scalability_mode);
+ is_svc |= stream_layers_config->num_spatial_layers > 1;
+ RTC_CHECK(stream_layers_config->num_spatial_layers == 1 ||
+ video_config.encoding_params.size() == 1)
+ << "Can't enable SVC modes with multiple spatial layers ("
+ << stream_layers_config->num_spatial_layers
+ << " layers) or simulcast ("
+ << video_config.encoding_params.size() << " layers)";
+ if (video_config.emulated_sfu_config->target_layer_index) {
+ RTC_CHECK_LT(*video_config.emulated_sfu_config->target_layer_index,
+ stream_layers_config->num_spatial_layers);
+ }
+ }
+ if (!is_svc && video_config.emulated_sfu_config->target_layer_index) {
+ RTC_CHECK_LT(*video_config.emulated_sfu_config->target_layer_index,
+ video_config.encoding_params.size());
+ }
+ }
+ }
+ }
+ if (p.audio_config) {
+ bool inserted =
+ audio_labels_.insert(p.audio_config->stream_label.value()).second;
+ RTC_CHECK(inserted) << "Duplicate audio_config.stream_label="
+ << p.audio_config->stream_label.value();
+ // TODO(bugs.webrtc.org/4762): remove this check after synchronization of
+ // more than two streams is supported.
+ if (p.audio_config->sync_group.has_value()) {
+ bool sync_group_inserted =
+ audio_sync_groups_.insert(p.audio_config->sync_group.value()).second;
+ RTC_CHECK(sync_group_inserted)
+ << "Sync group shouldn't consist of more than two streams (one "
+ "video and one audio). Duplicate audio_config.sync_group="
+ << p.audio_config->sync_group.value();
+ }
+ // Check that if mode input file name specified only if mode is kFile.
+ if (p.audio_config.value().mode == AudioConfig::Mode::kGenerated) {
+ RTC_CHECK(!p.audio_config.value().input_file_name);
+ }
+ if (p.audio_config.value().mode == AudioConfig::Mode::kFile) {
+ RTC_CHECK(p.audio_config.value().input_file_name);
+ RTC_CHECK(
+ test::FileExists(p.audio_config.value().input_file_name.value()))
+ << p.audio_config.value().input_file_name.value() << " doesn't exist";
+ }
+ }
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.h b/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.h
new file mode 100644
index 0000000000..c222811546
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/peer_params_preprocessor.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_
+#define TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_
+
+#include <memory>
+#include <set>
+#include <string>
+
+#include "api/test/pclf/peer_configurer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Fills in defaults for underspecified peer parameters and validates them.
+// An instance accumulates the names/labels it has seen, so uniqueness is
+// enforced across every peer processed by the same preprocessor.
+class PeerParamsPreprocessor {
+ public:
+  PeerParamsPreprocessor();
+  ~PeerParamsPreprocessor();
+
+  // Set missing params to default values if it is required:
+  // * Generate video stream labels if some of them are missing
+  // * Generate audio stream labels if some of them are missing
+  // * Set video source generation mode if it is not specified
+  // * Video codecs under test
+  void SetDefaultValuesForMissingParams(PeerConfigurer& peer);
+
+  // Validate peer's parameters, also ensure uniqueness of all video stream
+  // labels.
+  void ValidateParams(const PeerConfigurer& peer);
+
+ private:
+  // Forward declared; defined out of line in the .cc file.
+  class DefaultNamesProvider;
+  std::unique_ptr<DefaultNamesProvider> peer_names_provider_;
+
+  // Names, stream labels and sync groups seen so far. ValidateParams inserts
+  // into these sets and RTC_CHECKs the insertion to reject duplicates.
+  std::set<std::string> peer_names_;
+  std::set<std::string> video_labels_;
+  std::set<std::string> audio_labels_;
+  std::set<std::string> video_sync_groups_;
+  std::set<std::string> audio_sync_groups_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_PEER_PARAMS_PREPROCESSOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.cc b/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.cc
new file mode 100644
index 0000000000..af55f29175
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.cc
@@ -0,0 +1,601 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/sdp/sdp_changer.h"
+
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/jsep_session_description.h"
+#include "api/test/pclf/media_configuration.h"
+#include "media/base/media_constants.h"
+#include "p2p/base/p2p_constants.h"
+#include "pc/sdp_utils.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// Renders `codec_required_params` as "key=value;key=value;..." for use in
+// human readable error messages.
+std::string CodecRequiredParamsToString(
+    const std::map<std::string, std::string>& codec_required_params) {
+  rtc::StringBuilder out;
+  for (const auto& [param_name, param_value] : codec_required_params) {
+    out << param_name << "=" << param_value << ";";
+  }
+  return out.str();
+}
+
+// Renders `supported_codecs` as "name(key=value;...); name; ..." for use in
+// human readable error messages.
+std::string SupportedCodecsToString(
+    rtc::ArrayView<const RtpCodecCapability> supported_codecs) {
+  rtc::StringBuilder out;
+  for (const RtpCodecCapability& codec : supported_codecs) {
+    out << codec.name;
+    if (!codec.parameters.empty()) {
+      out << "(";
+      for (const auto& [param_name, param_value] : codec.parameters) {
+        out << param_name << "=" << param_value << ";";
+      }
+      out << ")";
+    }
+    out << "; ";
+  }
+  return out.str();
+}
+
+} // namespace
+
+// Picks from `supported_codecs` every codec that matches one of the requested
+// `video_codecs` (same name and all required parameters present and equal),
+// preserving the requested order, then appends RTX / FlexFEC / RED+ULPFEC
+// capabilities according to the flags. Crashes (RTC_CHECK) if any requested
+// codec has no supported match.
+std::vector<RtpCodecCapability> FilterVideoCodecCapabilities(
+    rtc::ArrayView<const VideoCodecConfig> video_codecs,
+    bool use_rtx,
+    bool use_ulpfec,
+    bool use_flexfec,
+    rtc::ArrayView<const RtpCodecCapability> supported_codecs) {
+  std::vector<RtpCodecCapability> output_codecs;
+  // Find requested codecs among supported and add them to output in the order
+  // they were requested.
+  for (auto& codec_request : video_codecs) {
+    size_t size_before = output_codecs.size();
+    for (auto& codec : supported_codecs) {
+      if (codec.name != codec_request.name) {
+        continue;
+      }
+      // A codec matches only if every required parameter exists in the
+      // capability with exactly the requested value.
+      bool parameters_matched = true;
+      for (const auto& item : codec_request.required_params) {
+        auto it = codec.parameters.find(item.first);
+        if (it == codec.parameters.end()) {
+          parameters_matched = false;
+          break;
+        }
+        if (item.second != it->second) {
+          parameters_matched = false;
+          break;
+        }
+      }
+      if (parameters_matched) {
+        output_codecs.push_back(codec);
+      }
+    }
+    // At least one capability must have been appended for this request.
+    RTC_CHECK_GT(output_codecs.size(), size_before)
+        << "Codec with name=" << codec_request.name << " and params {"
+        << CodecRequiredParamsToString(codec_request.required_params)
+        << "} is unsupported for this peer connection. Supported codecs are: "
+        << SupportedCodecsToString(supported_codecs);
+  }
+
+  // Add required FEC and RTX codecs to output.
+  for (auto& codec : supported_codecs) {
+    if (codec.name == cricket::kRtxCodecName && use_rtx) {
+      output_codecs.push_back(codec);
+    } else if (codec.name == cricket::kFlexfecCodecName && use_flexfec) {
+      output_codecs.push_back(codec);
+    } else if ((codec.name == cricket::kRedCodecName ||
+                codec.name == cricket::kUlpfecCodecName) &&
+               use_ulpfec) {
+      // Red and ulpfec should be enabled or disabled together.
+      output_codecs.push_back(codec);
+    }
+  }
+  return output_codecs;
+}
+
+// If offer has no simulcast video sections - do nothing.
+//
+// If offer has simulcast video sections - for each section creates
+// SimulcastSectionInfo and put it into `context_`. As a side effect the
+// section's rids are rewritten as kSend and a fresh SimulcastDescription with
+// one send layer per rid is installed on the media description.
+void SignalingInterceptor::FillSimulcastContext(
+    SessionDescriptionInterface* offer) {
+  for (auto& content : offer->description()->contents()) {
+    cricket::MediaContentDescription* media_desc = content.media_description();
+    if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) {
+      continue;
+    }
+    if (media_desc->HasSimulcast()) {
+      // We support only single stream simulcast sections with rids.
+      RTC_CHECK_EQ(media_desc->mutable_streams().size(), 1);
+      RTC_CHECK(media_desc->mutable_streams()[0].has_rids());
+
+      // Create SimulcastSectionInfo for this video section.
+      SimulcastSectionInfo info(content.mid(), content.type,
+                                media_desc->mutable_streams()[0].rids());
+
+      // Set new rids basing on created SimulcastSectionInfo.
+      std::vector<cricket::RidDescription> rids;
+      cricket::SimulcastDescription simulcast_description;
+      for (std::string& rid : info.rids) {
+        rids.emplace_back(rid, cricket::RidDirection::kSend);
+        simulcast_description.send_layers().AddLayer(
+            cricket::SimulcastLayer(rid, false));
+      }
+      media_desc->mutable_streams()[0].set_rids(rids);
+      media_desc->set_simulcast_description(simulcast_description);
+
+      // Remember the extensions so the answer patching can restore them.
+      info.simulcast_description = media_desc->simulcast_description();
+      for (const auto& extension : media_desc->rtp_header_extensions()) {
+        if (extension.uri == RtpExtension::kMidUri) {
+          info.mid_extension = extension;
+        } else if (extension.uri == RtpExtension::kRidUri) {
+          info.rid_extension = extension;
+        } else if (extension.uri == RtpExtension::kRepairedRidUri) {
+          info.rrid_extension = extension;
+        }
+      }
+      // Both rid and mid header extensions must have been negotiated for the
+      // simulcast patching to work.
+      RTC_CHECK_NE(info.rid_extension.id, 0);
+      RTC_CHECK_NE(info.mid_extension.id, 0);
+      bool transport_description_found = false;
+      for (auto& transport_info : offer->description()->transport_infos()) {
+        if (transport_info.content_name == info.mid) {
+          info.transport_description = transport_info.description;
+          transport_description_found = true;
+          break;
+        }
+      }
+      RTC_CHECK(transport_description_found);
+
+      context_.AddSimulcastInfo(info);
+    }
+  }
+}
+
+// Patches `offer` before the SDP exchange: records the original order of the
+// media sections (later restored by RestoreMediaSectionsOrder), applies the
+// conference mode flag to sending video sections, and - when simulcast
+// streams are configured - delegates to the codec specific patching.
+LocalAndRemoteSdp SignalingInterceptor::PatchOffer(
+    std::unique_ptr<SessionDescriptionInterface> offer,
+    const VideoCodecConfig& first_codec) {
+  for (auto& content : offer->description()->contents()) {
+    context_.mids_order.push_back(content.mid());
+    cricket::MediaContentDescription* media_desc = content.media_description();
+    if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) {
+      continue;
+    }
+    if (content.media_description()->streams().empty()) {
+      // It means that this media section describes receive only media section
+      // in SDP.
+      RTC_CHECK_EQ(content.media_description()->direction(),
+                   RtpTransceiverDirection::kRecvOnly);
+      continue;
+    }
+    media_desc->set_conference_mode(params_.use_conference_mode);
+  }
+
+  if (!params_.stream_label_to_simulcast_streams_count.empty()) {
+    // Because simulcast enabled `params_.video_codecs` has only 1 element.
+    if (first_codec.name == cricket::kVp8CodecName) {
+      return PatchVp8Offer(std::move(offer));
+    }
+
+    if (first_codec.name == cricket::kVp9CodecName) {
+      return PatchVp9Offer(std::move(offer));
+    }
+  }
+
+  // No simulcast related patching needed: local and remote peers see the
+  // same SDP.
+  auto offer_for_remote = CloneSessionDescription(offer.get());
+  return LocalAndRemoteSdp(std::move(offer), std::move(offer_for_remote));
+}
+
+// Converts each VP8 simulcast video section of `offer` into one media section
+// per rid for the remote peer: swaps the mid/rid header extensions, removes
+// rids and the SimulcastDescription from the prototype section, duplicates it
+// once per rid, rebuilds the BUNDLE group and creates a TransportInfo for
+// every new section. Returns the untouched offer for the local peer and the
+// patched one for the remote peer.
+LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer(
+    std::unique_ptr<SessionDescriptionInterface> offer) {
+  FillSimulcastContext(offer.get());
+  if (!context_.HasSimulcast()) {
+    // Nothing to patch: remote peer gets a plain copy of the offer.
+    auto offer_for_remote = CloneSessionDescription(offer.get());
+    return LocalAndRemoteSdp(std::move(offer), std::move(offer_for_remote));
+  }
+
+  // Clone original offer description. We mustn't access original offer after
+  // this point.
+  std::unique_ptr<cricket::SessionDescription> desc =
+      offer->description()->Clone();
+
+  for (auto& info : context_.simulcast_infos) {
+    // For each simulcast section we have to perform:
+    // 1. Swap MID and RID header extensions
+    // 2. Remove RIDs from streams and remove SimulcastDescription
+    // 3. For each RID duplicate media section
+    cricket::ContentInfo* simulcast_content = desc->GetContentByName(info.mid);
+
+    // Now we need to prepare common prototype for "m=video" sections, in which
+    // single simulcast section will be converted. Do it before removing content
+    // because otherwise description will be deleted.
+    std::unique_ptr<cricket::MediaContentDescription> prototype_media_desc =
+        simulcast_content->media_description()->Clone();
+
+    // Remove simulcast video section from offer.
+    RTC_CHECK(desc->RemoveContentByName(simulcast_content->mid()));
+    // Clear `simulcast_content`, because now it is pointing to removed object.
+    simulcast_content = nullptr;
+
+    // Swap mid and rid extensions, so remote peer will understand rid as mid.
+    // Also remove rid extension.
+    std::vector<webrtc::RtpExtension> extensions =
+        prototype_media_desc->rtp_header_extensions();
+    for (auto ext_it = extensions.begin(); ext_it != extensions.end();) {
+      if (ext_it->uri == RtpExtension::kRidUri) {
+        // We don't need rid extension for remote peer.
+        ext_it = extensions.erase(ext_it);
+        continue;
+      }
+      if (ext_it->uri == RtpExtension::kRepairedRidUri) {
+        // We don't support RTX in simulcast.
+        ext_it = extensions.erase(ext_it);
+        continue;
+      }
+      if (ext_it->uri == RtpExtension::kMidUri) {
+        ext_it->id = info.rid_extension.id;
+      }
+      ++ext_it;
+    }
+
+    prototype_media_desc->ClearRtpHeaderExtensions();
+    prototype_media_desc->set_rtp_header_extensions(extensions);
+
+    // We support only single stream inside video section with simulcast
+    RTC_CHECK_EQ(prototype_media_desc->mutable_streams().size(), 1);
+    // This stream must have rids.
+    RTC_CHECK(prototype_media_desc->mutable_streams()[0].has_rids());
+
+    // Remove rids and simulcast description from media description.
+    prototype_media_desc->mutable_streams()[0].set_rids({});
+    prototype_media_desc->set_simulcast_description(
+        cricket::SimulcastDescription());
+
+    // For each rid add separate video section.
+    for (std::string& rid : info.rids) {
+      desc->AddContent(rid, info.media_protocol_type,
+                       prototype_media_desc->Clone());
+    }
+  }
+
+  // Now we need to add bundle line to have all media bundled together.
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  for (auto& content : desc->contents()) {
+    bundle_group.AddContentName(content.mid());
+  }
+  if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
+    desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  }
+  desc->AddGroup(bundle_group);
+
+  // Update transport_infos to add TransportInfo for each new media section.
+  // Remove transport infos that correspond to simulcast video sections.
+  // NOTE: `transport_infos.end()` must be passed as second argument of
+  // erase(): the previous single-iterator form erased only one element,
+  // leaving moved-from entries behind when several simulcast sections were
+  // removed (and was UB when std::remove_if removed nothing).
+  std::vector<cricket::TransportInfo> transport_infos = desc->transport_infos();
+  transport_infos.erase(
+      std::remove_if(transport_infos.begin(), transport_infos.end(),
+                     [this](const cricket::TransportInfo& ti) {
+                       return context_.simulcast_infos_by_mid.find(
+                                  ti.content_name) !=
+                              context_.simulcast_infos_by_mid.end();
+                     }),
+      transport_infos.end());
+  for (auto& info : context_.simulcast_infos) {
+    for (auto& rid : info.rids) {
+      transport_infos.emplace_back(rid, info.transport_description);
+    }
+  }
+  desc->set_transport_infos(transport_infos);
+
+  // Create patched offer.
+  auto patched_offer =
+      std::make_unique<JsepSessionDescription>(SdpType::kOffer);
+  patched_offer->Initialize(std::move(desc), offer->session_id(),
+                            offer->session_version());
+  return LocalAndRemoteSdp(std::move(offer), std::move(patched_offer));
+}
+
+// Patches a VP9 offer for SVC: for every sending video stream configured in
+// `params_.stream_label_to_simulcast_streams_count`, generates one extra
+// primary SSRC (plus its RTX FID pair) per additional layer and groups all
+// primary SSRCs into a SIM ssrc-group. Both peers receive the same SDP.
+LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer(
+    std::unique_ptr<SessionDescriptionInterface> offer) {
+  rtc::UniqueRandomIdGenerator ssrcs_generator;
+  // Seed the generator with every SSRC already present in the offer so newly
+  // generated SSRCs cannot collide with existing ones.
+  for (auto& content : offer->description()->contents()) {
+    for (auto& stream : content.media_description()->streams()) {
+      for (auto& ssrc : stream.ssrcs) {
+        ssrcs_generator.AddKnownId(ssrc);
+      }
+    }
+  }
+
+  for (auto& content : offer->description()->contents()) {
+    if (content.media_description()->type() !=
+        cricket::MediaType::MEDIA_TYPE_VIDEO) {
+      // We are interested in only video tracks
+      continue;
+    }
+    if (content.media_description()->direction() ==
+        RtpTransceiverDirection::kRecvOnly) {
+      // If direction is receive only, then there is no media in this track
+      // from the sender side, so nothing has to be done with it.
+      continue;
+    }
+    RTC_CHECK_EQ(content.media_description()->streams().size(), 1);
+    cricket::StreamParams& stream =
+        content.media_description()->mutable_streams()[0];
+    RTC_CHECK_EQ(stream.stream_ids().size(), 2)
+        << "Expected 2 stream ids in video stream: 1st - sync_group, 2nd - "
+           "unique label";
+    std::string stream_label = stream.stream_ids()[1];
+
+    auto it =
+        params_.stream_label_to_simulcast_streams_count.find(stream_label);
+    if (it == params_.stream_label_to_simulcast_streams_count.end()) {
+      // This stream is not configured for SVC.
+      continue;
+    }
+    int svc_layers_count = it->second;
+
+    RTC_CHECK(stream.has_ssrc_groups()) << "Only SVC with RTX is supported";
+    RTC_CHECK_EQ(stream.ssrc_groups.size(), 1)
+        << "Too many ssrc groups in the track";
+    std::vector<uint32_t> primary_ssrcs;
+    stream.GetPrimarySsrcs(&primary_ssrcs);
+    RTC_CHECK(primary_ssrcs.size() == 1);
+    // One primary SSRC already exists; add another (with its RTX pair) for
+    // each remaining layer.
+    for (int i = 1; i < svc_layers_count; ++i) {
+      uint32_t ssrc = ssrcs_generator.GenerateId();
+      primary_ssrcs.push_back(ssrc);
+      stream.add_ssrc(ssrc);
+      stream.AddFidSsrc(ssrc, ssrcs_generator.GenerateId());
+    }
+    stream.ssrc_groups.push_back(
+        cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, primary_ssrcs));
+  }
+  auto offer_for_remote = CloneSessionDescription(offer.get());
+  return LocalAndRemoteSdp(std::move(offer), std::move(offer_for_remote));
+}
+
+// Patches `answer` symmetrically to PatchOffer: applies the conference mode
+// flag to receive-only video sections and - when simulcast streams are
+// configured - delegates to the codec specific patching.
+LocalAndRemoteSdp SignalingInterceptor::PatchAnswer(
+    std::unique_ptr<SessionDescriptionInterface> answer,
+    const VideoCodecConfig& first_codec) {
+  for (auto& content : answer->description()->contents()) {
+    cricket::MediaContentDescription* media_desc = content.media_description();
+    if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) {
+      continue;
+    }
+    if (content.media_description()->direction() !=
+        RtpTransceiverDirection::kRecvOnly) {
+      continue;
+    }
+    media_desc->set_conference_mode(params_.use_conference_mode);
+  }
+
+  if (!params_.stream_label_to_simulcast_streams_count.empty()) {
+    // Because simulcast enabled `params_.video_codecs` has only 1 element.
+    if (first_codec.name == cricket::kVp8CodecName) {
+      return PatchVp8Answer(std::move(answer));
+    }
+
+    if (first_codec.name == cricket::kVp9CodecName) {
+      return PatchVp9Answer(std::move(answer));
+    }
+  }
+
+  // No simulcast related patching needed.
+  auto answer_for_remote = CloneSessionDescription(answer.get());
+  return LocalAndRemoteSdp(std::move(answer), std::move(answer_for_remote));
+}
+
+// Reverses PatchVp8Offer on the answer: collapses the per-rid media sections
+// back into a single simulcast section per SimulcastSectionInfo, restores the
+// original mid/rid header extensions, rebuilds receive-direction rids and the
+// SimulcastDescription, restores the original section order, the BUNDLE group
+// and a single TransportInfo per simulcast section.
+LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer(
+    std::unique_ptr<SessionDescriptionInterface> answer) {
+  if (!context_.HasSimulcast()) {
+    // Nothing to patch: remote peer gets a plain copy of the answer.
+    auto answer_for_remote = CloneSessionDescription(answer.get());
+    return LocalAndRemoteSdp(std::move(answer), std::move(answer_for_remote));
+  }
+
+  std::unique_ptr<cricket::SessionDescription> desc =
+      answer->description()->Clone();
+
+  for (auto& info : context_.simulcast_infos) {
+    cricket::ContentInfo* simulcast_content =
+        desc->GetContentByName(info.rids[0]);
+    RTC_CHECK(simulcast_content);
+
+    // Get media description, which will be converted to simulcast answer.
+    std::unique_ptr<cricket::MediaContentDescription> media_desc =
+        simulcast_content->media_description()->Clone();
+    // Set `simulcast_content` to nullptr, because then it will be removed, so
+    // it will point to deleted object.
+    simulcast_content = nullptr;
+
+    // Remove separate media sections for simulcast streams.
+    for (auto& rid : info.rids) {
+      RTC_CHECK(desc->RemoveContentByName(rid));
+    }
+
+    // Patch `media_desc` to make it simulcast answer description.
+    // Restore mid/rid rtp header extensions
+    std::vector<webrtc::RtpExtension> extensions =
+        media_desc->rtp_header_extensions();
+    // First remove existing rid/mid header extensions.
+    // NOTE: `extensions.end()` must be passed as second argument of erase():
+    // the previous single-iterator form erased only one element, leaving
+    // moved-from extensions behind when mid, rid and rrid were all present.
+    extensions.erase(
+        std::remove_if(extensions.begin(), extensions.end(),
+                       [](const webrtc::RtpExtension& e) {
+                         return e.uri == RtpExtension::kMidUri ||
+                                e.uri == RtpExtension::kRidUri ||
+                                e.uri == RtpExtension::kRepairedRidUri;
+                       }),
+        extensions.end());
+
+    // Then add right ones.
+    extensions.push_back(info.mid_extension);
+    extensions.push_back(info.rid_extension);
+    // The rrid extension is intentionally not restored: RTX is not supported
+    // in simulcast (see PatchVp8Offer).
+    // extensions.push_back(info.rrid_extension);
+    media_desc->ClearRtpHeaderExtensions();
+    media_desc->set_rtp_header_extensions(extensions);
+
+    // Add StreamParams with rids for receive.
+    RTC_CHECK_EQ(media_desc->mutable_streams().size(), 0);
+    std::vector<cricket::RidDescription> rids;
+    for (auto& rid : info.rids) {
+      rids.emplace_back(rid, cricket::RidDirection::kReceive);
+    }
+    cricket::StreamParams stream_params;
+    stream_params.set_rids(rids);
+    media_desc->mutable_streams().push_back(stream_params);
+
+    // Restore SimulcastDescription. It should correspond to one from offer,
+    // but it have to have receive layers instead of send. So we need to put
+    // send layers from offer to receive layers in answer.
+    cricket::SimulcastDescription simulcast_description;
+    for (const auto& layer : info.simulcast_description.send_layers()) {
+      simulcast_description.receive_layers().AddLayerWithAlternatives(layer);
+    }
+    media_desc->set_simulcast_description(simulcast_description);
+
+    // Add simulcast media section.
+    desc->AddContent(info.mid, info.media_protocol_type, std::move(media_desc));
+  }
+
+  desc = RestoreMediaSectionsOrder(std::move(desc));
+
+  // Now we need to add bundle line to have all media bundled together.
+  cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE);
+  for (auto& content : desc->contents()) {
+    bundle_group.AddContentName(content.mid());
+  }
+  if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
+    desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  }
+  desc->AddGroup(bundle_group);
+
+  // Fix transport_infos: it have to have single info for simulcast section.
+  std::vector<cricket::TransportInfo> transport_infos = desc->transport_infos();
+  std::map<std::string, cricket::TransportDescription>
+      mid_to_transport_description;
+  for (auto info_it = transport_infos.begin();
+       info_it != transport_infos.end();) {
+    auto it = context_.simulcast_infos_by_rid.find(info_it->content_name);
+    if (it != context_.simulcast_infos_by_rid.end()) {
+      // This transport info correspond to some extra added media section.
+      mid_to_transport_description.insert(
+          {it->second->mid, info_it->description});
+      info_it = transport_infos.erase(info_it);
+    } else {
+      ++info_it;
+    }
+  }
+  for (auto& info : context_.simulcast_infos) {
+    transport_infos.emplace_back(info.mid,
+                                 mid_to_transport_description.at(info.mid));
+  }
+  desc->set_transport_infos(transport_infos);
+
+  auto patched_answer =
+      std::make_unique<JsepSessionDescription>(SdpType::kAnswer);
+  patched_answer->Initialize(std::move(desc), answer->session_id(),
+                             answer->session_version());
+  return LocalAndRemoteSdp(std::move(answer), std::move(patched_answer));
+}
+
+// Returns a copy of `source` with its media sections reordered to match
+// `context_.mids_order` (the order recorded from the original offer in
+// PatchOffer). RTC_CHECKs that the sets of mids match exactly.
+std::unique_ptr<cricket::SessionDescription>
+SignalingInterceptor::RestoreMediaSectionsOrder(
+    std::unique_ptr<cricket::SessionDescription> source) {
+  std::unique_ptr<cricket::SessionDescription> out = source->Clone();
+  // Empty the clone first; every recorded mid must be present in it...
+  for (auto& mid : context_.mids_order) {
+    RTC_CHECK(out->RemoveContentByName(mid));
+  }
+  // ...and `source` must contain nothing beyond the recorded mids.
+  RTC_CHECK_EQ(out->contents().size(), 0);
+  for (auto& mid : context_.mids_order) {
+    cricket::ContentInfo* content = source->GetContentByName(mid);
+    RTC_CHECK(content);
+    out->AddContent(mid, content->type, content->media_description()->Clone());
+  }
+  return out;
+}
+
+// VP9 answers need no patching here: the SVC related SDP changes are made on
+// the offer side (see PatchVp9Offer), so the answer is simply cloned for the
+// remote peer.
+LocalAndRemoteSdp SignalingInterceptor::PatchVp9Answer(
+    std::unique_ptr<SessionDescriptionInterface> answer) {
+  auto answer_for_remote = CloneSessionDescription(answer.get());
+  return LocalAndRemoteSdp(std::move(answer), std::move(answer_for_remote));
+}
+
+// Adjusts the offerer's local ICE candidates to the patched SDP: a candidate
+// belonging to an original simulcast section (looked up by mid) is replicated
+// once per rid, matching the per-rid sections created by PatchVp8Offer; all
+// other candidates are copied unchanged.
+std::vector<std::unique_ptr<IceCandidateInterface>>
+SignalingInterceptor::PatchOffererIceCandidates(
+    rtc::ArrayView<const IceCandidateInterface* const> candidates) {
+  std::vector<std::unique_ptr<IceCandidateInterface>> out;
+  for (auto* candidate : candidates) {
+    auto simulcast_info_it =
+        context_.simulcast_infos_by_mid.find(candidate->sdp_mid());
+    if (simulcast_info_it != context_.simulcast_infos_by_mid.end()) {
+      // This is candidate for simulcast section, so it should be transformed
+      // into candidates for replicated sections. The sdpMLineIndex is set to
+      // -1 and ignored if the rid is present.
+      for (const std::string& rid : simulcast_info_it->second->rids) {
+        out.push_back(CreateIceCandidate(rid, -1, candidate->candidate()));
+      }
+    } else {
+      out.push_back(CreateIceCandidate(candidate->sdp_mid(),
+                                       candidate->sdp_mline_index(),
+                                       candidate->candidate()));
+    }
+  }
+  RTC_CHECK_GT(out.size(), 0);
+  return out;
+}
+
+// Adjusts the answerer's local ICE candidates back to the original SDP:
+// candidates for the per-rid replicated sections are mapped to the original
+// simulcast section's mid; with simulcast active, any other candidate goes to
+// the first m-line (everything is bundled); otherwise candidates are copied
+// unchanged.
+std::vector<std::unique_ptr<IceCandidateInterface>>
+SignalingInterceptor::PatchAnswererIceCandidates(
+    rtc::ArrayView<const IceCandidateInterface* const> candidates) {
+  std::vector<std::unique_ptr<IceCandidateInterface>> out;
+  for (auto* candidate : candidates) {
+    auto simulcast_info_it =
+        context_.simulcast_infos_by_rid.find(candidate->sdp_mid());
+    if (simulcast_info_it != context_.simulcast_infos_by_rid.end()) {
+      // This is candidate for replicated section, created from single simulcast
+      // section, so it should be transformed into candidates for simulcast
+      // section.
+      out.push_back(CreateIceCandidate(simulcast_info_it->second->mid, 0,
+                                       candidate->candidate()));
+    } else if (!context_.simulcast_infos_by_rid.empty()) {
+      // When using simulcast and bundle, put everything on the first m-line.
+      out.push_back(CreateIceCandidate("", 0, candidate->candidate()));
+    } else {
+      out.push_back(CreateIceCandidate(candidate->sdp_mid(),
+                                       candidate->sdp_mline_index(),
+                                       candidate->candidate()));
+    }
+  }
+  RTC_CHECK_GT(out.size(), 0);
+  return out;
+}
+
+// Captures the section's mid and protocol type and extracts the bare rid
+// strings from the full rid descriptions for later reuse.
+SignalingInterceptor::SimulcastSectionInfo::SimulcastSectionInfo(
+    const std::string& mid,
+    cricket::MediaProtocolType media_protocol_type,
+    const std::vector<cricket::RidDescription>& rids_desc)
+    : mid(mid), media_protocol_type(media_protocol_type) {
+  for (auto& rid : rids_desc) {
+    rids.push_back(rid.rid);
+  }
+}
+
+// Stores `info` and indexes it both by mid and by each of its rids.
+// Duplicate mids or rids are programming errors and crash via RTC_CHECK.
+// NOTE(review): both maps keep raw pointers into `simulcast_infos`, so the
+// backing container must keep element addresses stable across push_back —
+// verify (a std::vector reallocation would leave previously stored pointers
+// dangling once more than one info is added).
+void SignalingInterceptor::SignalingContext::AddSimulcastInfo(
+    const SimulcastSectionInfo& info) {
+  simulcast_infos.push_back(info);
+  bool inserted =
+      simulcast_infos_by_mid.insert({info.mid, &simulcast_infos.back()}).second;
+  RTC_CHECK(inserted);
+  for (auto& rid : info.rids) {
+    inserted =
+        simulcast_infos_by_rid.insert({rid, &simulcast_infos.back()}).second;
+    RTC_CHECK(inserted);
+  }
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.h b/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.h
new file mode 100644
index 0000000000..6f68d03f52
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/sdp/sdp_changer.h
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_SDP_SDP_CHANGER_H_
+#define TEST_PC_E2E_SDP_SDP_CHANGER_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/jsep.h"
+#include "api/rtp_parameters.h"
+#include "api/test/pclf/media_configuration.h"
+#include "media/base/rid_description.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Creates list of capabilities, which can be set on RtpTransceiverInterface via
+// RtpTransceiverInterface::SetCodecPreferences(...) to negotiate use of codecs
+// from list of `supported_codecs` which will match `video_codecs`. If flags
+// `ulpfec` or `flexfec` set to true corresponding FEC codec will be added.
+// FEC and RTX codecs will be added after required codecs.
+//
+// All codecs will be added only if they exists in the list of
+// `supported_codecs`. If multiple codecs from this list will match
+// `video_codecs`, then all of them will be added to the output
+// vector and they will be added in the same order, as they were in
+// `supported_codecs`.
+std::vector<RtpCodecCapability> FilterVideoCodecCapabilities(
+ rtc::ArrayView<const VideoCodecConfig> video_codecs,
+ bool use_rtx,
+ bool use_ulpfec,
+ bool use_flexfec,
+ rtc::ArrayView<const RtpCodecCapability> supported_codecs);
+
+// Result of one patching step: the SDP to apply locally and the (possibly
+// different) SDP to deliver to the remote peer.
+struct LocalAndRemoteSdp {
+  LocalAndRemoteSdp(std::unique_ptr<SessionDescriptionInterface> local_sdp,
+                    std::unique_ptr<SessionDescriptionInterface> remote_sdp)
+      : local_sdp(std::move(local_sdp)), remote_sdp(std::move(remote_sdp)) {}
+
+  // Sdp, that should be as local description on the peer, that created it.
+  std::unique_ptr<SessionDescriptionInterface> local_sdp;
+  // Sdp, that should be set as remote description on the peer opposite to the
+  // one, who created it.
+  std::unique_ptr<SessionDescriptionInterface> remote_sdp;
+};
+
+// Options controlling how SignalingInterceptor rewrites SDP. The map holds,
+// per video stream label, the number of simulcast/SVC streams configured for
+// it; an empty map disables simulcast related patching.
+struct PatchingParams {
+  PatchingParams(
+      bool use_conference_mode,
+      std::map<std::string, int> stream_label_to_simulcast_streams_count)
+      : use_conference_mode(use_conference_mode),
+        stream_label_to_simulcast_streams_count(
+            stream_label_to_simulcast_streams_count) {}
+
+  bool use_conference_mode;
+  std::map<std::string, int> stream_label_to_simulcast_streams_count;
+};
+
+// Rewrites SDP offers/answers and ICE candidates according to
+// `PatchingParams`, e.g. converting a single simulcast video section into
+// per-rid media sections for the remote peer and back (see the .cc file).
+class SignalingInterceptor {
+ public:
+  explicit SignalingInterceptor(PatchingParams params)
+      : params_(std::move(params)) {}
+
+  LocalAndRemoteSdp PatchOffer(
+      std::unique_ptr<SessionDescriptionInterface> offer,
+      const VideoCodecConfig& first_codec);
+  LocalAndRemoteSdp PatchAnswer(
+      std::unique_ptr<SessionDescriptionInterface> answer,
+      const VideoCodecConfig& first_codec);
+
+  std::vector<std::unique_ptr<IceCandidateInterface>> PatchOffererIceCandidates(
+      rtc::ArrayView<const IceCandidateInterface* const> candidates);
+  std::vector<std::unique_ptr<IceCandidateInterface>>
+  PatchAnswererIceCandidates(
+      rtc::ArrayView<const IceCandidateInterface* const> candidates);
+
+ private:
+  // Contains information about simulcast section, that is required to perform
+  // modified offer/answer and ice candidates exchange.
+  struct SimulcastSectionInfo {
+    SimulcastSectionInfo(const std::string& mid,
+                         cricket::MediaProtocolType media_protocol_type,
+                         const std::vector<cricket::RidDescription>& rids_desc);
+
+    const std::string mid;
+    const cricket::MediaProtocolType media_protocol_type;
+    std::vector<std::string> rids;
+    cricket::SimulcastDescription simulcast_description;
+    webrtc::RtpExtension mid_extension;
+    webrtc::RtpExtension rid_extension;
+    webrtc::RtpExtension rrid_extension;
+    cricket::TransportDescription transport_description;
+  };
+
+  struct SignalingContext {
+    SignalingContext() = default;
+    // SignalingContext is not copyable and movable.
+    SignalingContext(SignalingContext&) = delete;
+    SignalingContext& operator=(SignalingContext&) = delete;
+    SignalingContext(SignalingContext&&) = delete;
+    SignalingContext& operator=(SignalingContext&&) = delete;
+
+    void AddSimulcastInfo(const SimulcastSectionInfo& info);
+    bool HasSimulcast() const { return !simulcast_infos.empty(); }
+
+    // std::deque (not std::vector) is required here: the two maps below hold
+    // raw pointers into this container, and deque::push_back never
+    // invalidates references to existing elements, while vector::push_back
+    // may reallocate and leave those pointers dangling as soon as a second
+    // simulcast section is added.
+    std::deque<SimulcastSectionInfo> simulcast_infos;
+    std::map<std::string, SimulcastSectionInfo*> simulcast_infos_by_mid;
+    std::map<std::string, SimulcastSectionInfo*> simulcast_infos_by_rid;
+
+    // Mids in the order they appeared in the original offer; used to restore
+    // the media section order in the patched answer.
+    std::vector<std::string> mids_order;
+  };
+
+  LocalAndRemoteSdp PatchVp8Offer(
+      std::unique_ptr<SessionDescriptionInterface> offer);
+  LocalAndRemoteSdp PatchVp9Offer(
+      std::unique_ptr<SessionDescriptionInterface> offer);
+  LocalAndRemoteSdp PatchVp8Answer(
+      std::unique_ptr<SessionDescriptionInterface> answer);
+  LocalAndRemoteSdp PatchVp9Answer(
+      std::unique_ptr<SessionDescriptionInterface> answer);
+
+  void FillSimulcastContext(SessionDescriptionInterface* offer);
+  std::unique_ptr<cricket::SessionDescription> RestoreMediaSectionsOrder(
+      std::unique_ptr<cricket::SessionDescription> source);
+
+  PatchingParams params_;
+  SignalingContext context_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_SDP_SDP_CHANGER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc
new file mode 100644
index 0000000000..65dca5b518
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc
@@ -0,0 +1,592 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h"
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/stats/rtc_stats.h"
+#include "api/stats/rtcstats_objects.h"
+#include "api/test/metrics/metric.h"
+#include "api/test/network_emulation/network_emulation_interfaces.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/units/data_rate.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/no_unique_address.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Unit;
+
+using NetworkLayerStats =
+ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStats;
+
+constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1);
+
+// Field trial which controls whether to report standard-compliant bytes
+// sent/received per stream. If enabled, padding and headers are not included
+// in bytes sent or received.
+constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats";
+
+// Synchronously fetches aggregated network stats for `endpoints`. The
+// emulation layer delivers stats via an async callback; we block on an
+// event until it fires and crash if it does not arrive within
+// `kStatsWaitTimeout`.
+EmulatedNetworkStats PopulateStats(std::vector<EmulatedEndpoint*> endpoints,
+                                   NetworkEmulationManager* network_emulation) {
+  EmulatedNetworkStats result;
+  rtc::Event done;
+  network_emulation->GetStats(endpoints,
+                              [&result, &done](EmulatedNetworkStats s) {
+                                result = std::move(s);
+                                done.Set();
+                              });
+  RTC_CHECK(done.Wait(kStatsWaitTimeout));
+  return result;
+}
+
+// Builds the reverse mapping from endpoint IP address to owning peer name.
+// Crashes if two peers claim the same endpoint address.
+std::map<rtc::IPAddress, std::string> PopulateIpToPeer(
+    const std::map<std::string, std::vector<EmulatedEndpoint*>>&
+        peer_endpoints) {
+  std::map<rtc::IPAddress, std::string> ip_to_peer;
+  for (const auto& [peer_name, endpoints] : peer_endpoints) {
+    for (const EmulatedEndpoint* const endpoint : endpoints) {
+      const rtc::IPAddress address = endpoint->GetPeerLocalAddress();
+      RTC_CHECK(ip_to_peer.find(address) == ip_to_peer.end())
+          << "Two peers can't share the same endpoint";
+      ip_to_peer.emplace(address, peer_name);
+    }
+  }
+  return ip_to_peer;
+}
+
+// Accumulates emulated network stats being executed on the network thread.
+// When all stats are collected stores it in thread safe variable.
+// Accumulates emulated network stats being executed on the network thread.
+// When all stats are collected stores it in thread safe variable.
+class EmulatedNetworkStatsAccumulator {
+ public:
+  // `expected_stats_count` - the number of calls to
+  // AddEndpointStats/AddUplinkStats/AddDownlinkStats the accumulator is going
+  // to wait. If called more than expected, the program will crash.
+  explicit EmulatedNetworkStatsAccumulator(size_t expected_stats_count)
+      : not_collected_stats_count_(expected_stats_count) {
+    // NOTE(review): `not_collected_stats_count_` is size_t, so this DCHECK is
+    // trivially true; it documents intent rather than checking anything.
+    RTC_DCHECK_GE(not_collected_stats_count_, 0);
+    if (not_collected_stats_count_ == 0) {
+      // Nothing to wait for - signal completion immediately so Wait()
+      // returns right away.
+      all_stats_collected_.Set();
+    }
+    // Constructed on the caller's thread but fed on the network thread;
+    // re-attach the checker to whichever thread calls Add*Stats first.
+    sequence_checker_.Detach();
+  }
+
+  // Has to be executed on network thread.
+  void AddEndpointStats(std::string peer_name, EmulatedNetworkStats stats) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    n_stats_[peer_name].endpoints_stats = std::move(stats);
+    DecrementNotCollectedStatsCount();
+  }
+
+  // Has to be executed on network thread.
+  void AddUplinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    n_stats_[peer_name].uplink_stats = std::move(stats);
+    DecrementNotCollectedStatsCount();
+  }
+
+  // Has to be executed on network thread.
+  void AddDownlinkStats(std::string peer_name, EmulatedNetworkNodeStats stats) {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    n_stats_[peer_name].downlink_stats = std::move(stats);
+    DecrementNotCollectedStatsCount();
+  }
+
+  // Can be executed on any thread.
+  // Returns true if count down was completed and false if timeout elapsed
+  // before.
+  bool Wait(TimeDelta timeout) { return all_stats_collected_.Wait(timeout); }
+
+  // Can be called once. Returns all collected stats by moving underlying
+  // object.
+  // NOTE(review): `stats_released_` is read/written without holding `mutex_`
+  // and the DCHECK precedes the lock; callers must not invoke ReleaseStats()
+  // concurrently - confirm single-caller usage.
+  std::map<std::string, NetworkLayerStats> ReleaseStats() {
+    RTC_DCHECK(!stats_released_);
+    stats_released_ = true;
+    MutexLock lock(&mutex_);
+    return std::move(stats_);
+  }
+
+ private:
+  // Decrements the pending-stats counter; once it reaches zero, publishes
+  // the accumulated map into `stats_` under `mutex_` and signals waiters.
+  void DecrementNotCollectedStatsCount() {
+    RTC_DCHECK_RUN_ON(&sequence_checker_);
+    RTC_CHECK_GT(not_collected_stats_count_, 0)
+        << "All stats are already collected";
+    not_collected_stats_count_--;
+    if (not_collected_stats_count_ == 0) {
+      MutexLock lock(&mutex_);
+      stats_ = std::move(n_stats_);
+      all_stats_collected_.Set();
+    }
+  }
+
+  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
+  // Remaining number of Add*Stats calls before `stats_` is published.
+  size_t not_collected_stats_count_ RTC_GUARDED_BY(sequence_checker_);
+  // Collected on the network thread. Moved into `stats_` after all stats are
+  // collected.
+  std::map<std::string, NetworkLayerStats> n_stats_
+      RTC_GUARDED_BY(sequence_checker_);
+
+  // Signaled when `not_collected_stats_count_` hits zero.
+  rtc::Event all_stats_collected_;
+  Mutex mutex_;
+  std::map<std::string, NetworkLayerStats> stats_ RTC_GUARDED_BY(mutex_);
+  bool stats_released_ = false;
+};
+
+} // namespace
+
+// Constructs the reporter. `metrics_logger` must be non-null and is stored
+// as a raw pointer, so it has to outlive this reporter.
+StatsBasedNetworkQualityMetricsReporter::
+    StatsBasedNetworkQualityMetricsReporter(
+        std::map<std::string, std::vector<EmulatedEndpoint*>> peer_endpoints,
+        NetworkEmulationManager* network_emulation,
+        test::MetricsLogger* metrics_logger)
+    : collector_(std::move(peer_endpoints), network_emulation),
+      // Clock comes from the emulation's time controller, so reported
+      // durations follow simulated time when it is in use.
+      clock_(network_emulation->time_controller()->GetClock()),
+      metrics_logger_(metrics_logger) {
+  RTC_CHECK(metrics_logger_);
+}
+
+StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector::
+    NetworkLayerStatsCollector(
+        std::map<std::string, std::vector<EmulatedEndpoint*>> peer_endpoints,
+        NetworkEmulationManager* network_emulation)
+    // `ip_to_peer_` is built from `peer_endpoints_` AFTER the move above, so
+    // it relies on `peer_endpoints_` being declared before `ip_to_peer_` in
+    // the class (member init order follows declaration order).
+    : peer_endpoints_(std::move(peer_endpoints)),
+      ip_to_peer_(PopulateIpToPeer(peer_endpoints_)),
+      network_emulation_(network_emulation) {}
+
+// Verifies the test starts from a clean slate: every peer's endpoints must
+// have zero packets sent and received before the call begins.
+void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector::
+    Start() {
+  MutexLock lock(&mutex_);
+  for (const auto& [peer_name, endpoints] : peer_endpoints_) {
+    const EmulatedNetworkStats initial_stats =
+        PopulateStats(endpoints, network_emulation_);
+    RTC_CHECK_EQ(initial_stats.overall_outgoing_stats.packets_sent, 0);
+    RTC_CHECK_EQ(initial_stats.overall_incoming_stats.packets_received, 0);
+  }
+}
+
+// Registers a late-joining peer: its endpoints, uplink and downlink nodes.
+void StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector::
+    AddPeer(absl::string_view peer_name,
+            std::vector<EmulatedEndpoint*> endpoints,
+            std::vector<EmulatedNetworkNode*> uplink,
+            std::vector<EmulatedNetworkNode*> downlink) {
+  MutexLock lock(&mutex_);
+  // When new peer is added not in the constructor, don't check if it has empty
+  // stats, because their endpoint could be used for traffic before.
+  //
+  // Populate `ip_to_peer_` BEFORE moving `endpoints` into `peer_endpoints_`.
+  // The previous code iterated `endpoints` after std::move()-ing it, walking
+  // a moved-from (empty) vector, so the IP -> peer mapping was never filled
+  // for peers registered through AddPeer().
+  for (const EmulatedEndpoint* const endpoint : endpoints) {
+    RTC_CHECK(ip_to_peer_.find(endpoint->GetPeerLocalAddress()) ==
+              ip_to_peer_.end())
+        << "Two peers can't share the same endpoint";
+    ip_to_peer_.emplace(endpoint->GetPeerLocalAddress(), peer_name);
+  }
+  peer_endpoints_.emplace(peer_name, std::move(endpoints));
+  peer_uplinks_.emplace(peer_name, std::move(uplink));
+  peer_downlinks_.emplace(peer_name, std::move(downlink));
+}
+
+// Collects endpoint/uplink/downlink stats for every registered peer and
+// derives, for each sender, the set of peers that actually received its
+// traffic. Blocks until all async stats callbacks have fired.
+std::map<std::string, NetworkLayerStats>
+StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector::
+    GetStats() {
+  MutexLock lock(&mutex_);
+  // One async callback is expected per map entry below, hence the sum.
+  EmulatedNetworkStatsAccumulator stats_accumulator(
+      peer_endpoints_.size() + peer_uplinks_.size() + peer_downlinks_.size());
+  // The lambdas capture `stats_accumulator` by reference; this is safe only
+  // because Wait() below blocks until every callback has run.
+  for (const auto& entry : peer_endpoints_) {
+    network_emulation_->GetStats(
+        entry.second, [&stats_accumulator,
+                       peer = entry.first](EmulatedNetworkStats s) mutable {
+          stats_accumulator.AddEndpointStats(std::move(peer), std::move(s));
+        });
+  }
+  for (const auto& entry : peer_uplinks_) {
+    network_emulation_->GetStats(
+        entry.second, [&stats_accumulator,
+                       peer = entry.first](EmulatedNetworkNodeStats s) mutable {
+          stats_accumulator.AddUplinkStats(std::move(peer), std::move(s));
+        });
+  }
+  for (const auto& entry : peer_downlinks_) {
+    network_emulation_->GetStats(
+        entry.second, [&stats_accumulator,
+                       peer = entry.first](EmulatedNetworkNodeStats s) mutable {
+          stats_accumulator.AddDownlinkStats(std::move(peer), std::move(s));
+        });
+  }
+  bool stats_collected = stats_accumulator.Wait(kStatsWaitTimeout);
+  RTC_CHECK(stats_collected);
+  std::map<std::string, NetworkLayerStats> peer_to_stats =
+      stats_accumulator.ReleaseStats();
+  // Invert the incoming per-source stats: if peer B received packets whose
+  // source IP belongs to peer A, then B is one of A's receivers.
+  std::map<std::string, std::vector<std::string>> sender_to_receivers;
+  for (const auto& entry : peer_endpoints_) {
+    const std::string& peer_name = entry.first;
+    const NetworkLayerStats& stats = peer_to_stats[peer_name];
+    for (const auto& income_stats_entry :
+         stats.endpoints_stats.incoming_stats_per_source) {
+      const rtc::IPAddress& source_ip = income_stats_entry.first;
+      auto it = ip_to_peer_.find(source_ip);
+      if (it == ip_to_peer_.end()) {
+        // Source IP is unknown for this collector, so will be skipped.
+        continue;
+      }
+      sender_to_receivers[it->second].push_back(peer_name);
+    }
+  }
+  for (auto& entry : peer_to_stats) {
+    const std::vector<std::string>& receivers =
+        sender_to_receivers[entry.first];
+    entry.second.receivers =
+        std::set<std::string>(receivers.begin(), receivers.end());
+  }
+  return peer_to_stats;
+}
+
+// Registers a peer with endpoints only; uplink/downlink node stats will not
+// be collected for it.
+void StatsBasedNetworkQualityMetricsReporter::AddPeer(
+    absl::string_view peer_name,
+    std::vector<EmulatedEndpoint*> endpoints) {
+  collector_.AddPeer(peer_name, std::move(endpoints), /*uplink=*/{},
+                     /*downlink=*/{});
+}
+
+// Registers a peer together with its uplink/downlink emulated nodes so that
+// per-link transport-time stats can be reported as well.
+void StatsBasedNetworkQualityMetricsReporter::AddPeer(
+    absl::string_view peer_name,
+    std::vector<EmulatedEndpoint*> endpoints,
+    std::vector<EmulatedNetworkNode*> uplink,
+    std::vector<EmulatedNetworkNode*> downlink) {
+  collector_.AddPeer(peer_name, std::move(endpoints), std::move(uplink),
+                     std::move(downlink));
+}
+
+// Begins a measurement interval: records the test case name and the start
+// timestamp used later for average-rate computation.
+// `reporter_helper` is unused by this reporter.
+void StatsBasedNetworkQualityMetricsReporter::Start(
+    absl::string_view test_case_name,
+    const TrackIdStreamInfoMap* reporter_helper) {
+  test_case_name_ = std::string(test_case_name);
+  // Crashes if the emulated network already has traffic recorded.
+  collector_.Start();
+  start_time_ = clock_->CurrentTime();
+}
+
+// Snapshots per-PeerConnection byte/packet counters from a GetStats report.
+// Each report overwrites the previous snapshot for `pc_label`, so the last
+// report before StopAndReportResults() wins.
+void StatsBasedNetworkQualityMetricsReporter::OnStatsReports(
+    absl::string_view pc_label,
+    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+  PCStats snapshot;
+
+  // Payload counters: sum over all inbound/outbound RTP streams. Header
+  // bytes are added because, without the standard-bytes field trial, the
+  // plain byte counters may already include headers.
+  for (const auto& inbound : report->GetStatsOfType<RTCInboundRTPStreamStats>()) {
+    snapshot.payload_received +=
+        DataSize::Bytes(inbound->bytes_received.ValueOrDefault(0ul) +
+                        inbound->header_bytes_received.ValueOrDefault(0ul));
+  }
+  for (const auto& outbound :
+       report->GetStatsOfType<RTCOutboundRTPStreamStats>()) {
+    snapshot.payload_sent +=
+        DataSize::Bytes(outbound->bytes_sent.ValueOrDefault(0ul) +
+                        outbound->header_bytes_sent.ValueOrDefault(0ul));
+  }
+
+  // Transport-level totals across all RTCTransport stats.
+  for (const auto& transport : report->GetStatsOfType<RTCTransportStats>()) {
+    snapshot.total_received +=
+        DataSize::Bytes(transport->bytes_received.ValueOrDefault(0ul));
+    snapshot.total_sent +=
+        DataSize::Bytes(transport->bytes_sent.ValueOrDefault(0ul));
+    snapshot.packets_received +=
+        transport->packets_received.ValueOrDefault(0ul);
+    snapshot.packets_sent += transport->packets_sent.ValueOrDefault(0ul);
+  }
+
+  MutexLock lock(&mutex_);
+  pc_stats_[std::string(pc_label)] = snapshot;
+}
+
+// Ends the measurement interval: logs network-layer stats per peer, then
+// reports PeerConnection-level metrics including a cross-peer packet-loss
+// estimate (packets this peer sent minus packets its receivers got).
+void StatsBasedNetworkQualityMetricsReporter::StopAndReportResults() {
+  Timestamp end_time = clock_->CurrentTime();
+
+  if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) {
+    RTC_LOG(LS_ERROR)
+        << "Non-standard GetStats; \"payload\" counts include RTP headers";
+  }
+
+  std::map<std::string, NetworkLayerStats> stats = collector_.GetStats();
+  for (const auto& entry : stats) {
+    LogNetworkLayerStats(entry.first, entry.second);
+  }
+  MutexLock lock(&mutex_);
+  for (const auto& pair : pc_stats_) {
+    // Every peer with PC stats must also have network-layer stats under the
+    // same name, otherwise the configuration is inconsistent.
+    auto it = stats.find(pair.first);
+    RTC_CHECK(it != stats.end())
+        << "Peer name used for PeerConnection stats collection and peer name "
+           "used for endpoints naming doesn't match. No endpoints found for "
+           "peer "
+        << pair.first;
+    const NetworkLayerStats& network_layer_stats = it->second;
+    // Sum packets received by all peers known to have received traffic from
+    // this sender.
+    int64_t total_packets_received = 0;
+    bool found = false;
+    for (const auto& dest_peer : network_layer_stats.receivers) {
+      auto pc_stats_it = pc_stats_.find(dest_peer);
+      if (pc_stats_it == pc_stats_.end()) {
+        continue;
+      }
+      found = true;
+      total_packets_received += pc_stats_it->second.packets_received;
+    }
+    // -1 is the sentinel for "loss could not be computed" (no receiver with
+    // PC stats was found).
+    int64_t packet_loss = -1;
+    if (found) {
+      packet_loss = pair.second.packets_sent - total_packets_received;
+    }
+    ReportStats(pair.first, pair.second, network_layer_stats, packet_loss,
+                end_time);
+  }
+}
+
+// Emits the per-peer single-value metrics: discarded traffic, payload and
+// transport byte/packet totals, average send/receive rates over the test
+// interval, and the sent-packet-loss estimate (-1 when not computable).
+// Metric names are a stable external contract - do not rename casually.
+void StatsBasedNetworkQualityMetricsReporter::ReportStats(
+    const std::string& pc_label,
+    const PCStats& pc_stats,
+    const NetworkLayerStats& network_layer_stats,
+    int64_t packet_loss,
+    const Timestamp& end_time) {
+  // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+  std::map<std::string, std::string> metric_metadata{
+      {MetricMetadataKey::kPeerMetadataKey, pc_label},
+      {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_discarded_no_receiver", GetTestCaseName(pc_label),
+      network_layer_stats.endpoints_stats.overall_incoming_stats
+          .bytes_discarded_no_receiver.bytes(),
+      Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_discarded_no_receiver", GetTestCaseName(pc_label),
+      network_layer_stats.endpoints_stats.overall_incoming_stats
+          .packets_discarded_no_receiver,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+
+  metrics_logger_->LogSingleValueMetric(
+      "payload_bytes_received", GetTestCaseName(pc_label),
+      pc_stats.payload_received.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "payload_bytes_sent", GetTestCaseName(pc_label),
+      pc_stats.payload_sent.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter, metric_metadata);
+
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_sent", GetTestCaseName(pc_label), pc_stats.total_sent.bytes(),
+      Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_sent", GetTestCaseName(pc_label), pc_stats.packets_sent,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  // Rates are averaged over the whole Start()..StopAndReportResults() window.
+  metrics_logger_->LogSingleValueMetric(
+      "average_send_rate", GetTestCaseName(pc_label),
+      (pc_stats.total_sent / (end_time - start_time_)).kbps<double>(),
+      Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+      metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "bytes_received", GetTestCaseName(pc_label),
+      pc_stats.total_received.bytes(), Unit::kBytes,
+      ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "packets_received", GetTestCaseName(pc_label), pc_stats.packets_received,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "average_receive_rate", GetTestCaseName(pc_label),
+      (pc_stats.total_received / (end_time - start_time_)).kbps<double>(),
+      Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
+      metric_metadata);
+  metrics_logger_->LogSingleValueMetric(
+      "sent_packets_loss", GetTestCaseName(pc_label), packet_loss,
+      Unit::kUnitless, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+}
+
+// Builds "<test case>/<network label>" for metric naming.
+std::string StatsBasedNetworkQualityMetricsReporter::GetTestCaseName(
+    absl::string_view network_label) const {
+  // Materialize the view instead of streaming `network_label.data()`: an
+  // absl::string_view is not guaranteed to be null-terminated, so .data()
+  // could read past the view's end or truncate at an embedded position.
+  rtc::StringBuilder builder;
+  builder << test_case_name_ << "/" << std::string(network_label);
+  return builder.str();
+}
+
+// Logs a human-readable dump of the peer's raw network-layer stats and, in
+// parallel, records the corresponding sample-series metrics (packet sizes,
+// queue wait times, link transport times). Average rates require at least
+// two packets, otherwise they are reported as zero.
+void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats(
+    const std::string& peer_name,
+    const NetworkLayerStats& stats) const {
+  DataRate average_send_rate =
+      stats.endpoints_stats.overall_outgoing_stats.packets_sent >= 2
+          ? stats.endpoints_stats.overall_outgoing_stats.AverageSendRate()
+          : DataRate::Zero();
+  DataRate average_receive_rate =
+      stats.endpoints_stats.overall_incoming_stats.packets_received >= 2
+          ? stats.endpoints_stats.overall_incoming_stats.AverageReceiveRate()
+          : DataRate::Zero();
+  // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey.
+  std::map<std::string, std::string> metric_metadata{
+      {MetricMetadataKey::kPeerMetadataKey, peer_name},
+      {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}};
+  rtc::StringBuilder log;
+  log << "Raw network layer statistic for [" << peer_name << "]:\n"
+      << "Local IPs:\n";
+  for (size_t i = 0; i < stats.endpoints_stats.local_addresses.size(); ++i) {
+    log << "  " << stats.endpoints_stats.local_addresses[i].ToString() << "\n";
+  }
+  // Sample-series metrics are only logged when non-empty, since empty series
+  // carry no information.
+  if (!stats.endpoints_stats.overall_outgoing_stats.sent_packets_size
+           .IsEmpty()) {
+    metrics_logger_->LogMetric(
+        "sent_packets_size", GetTestCaseName(peer_name),
+        stats.endpoints_stats.overall_outgoing_stats.sent_packets_size,
+        Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+  if (!stats.endpoints_stats.overall_incoming_stats.received_packets_size
+           .IsEmpty()) {
+    metrics_logger_->LogMetric(
+        "received_packets_size", GetTestCaseName(peer_name),
+        stats.endpoints_stats.overall_incoming_stats.received_packets_size,
+        Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+  if (!stats.endpoints_stats.overall_incoming_stats
+           .packets_discarded_no_receiver_size.IsEmpty()) {
+    metrics_logger_->LogMetric(
+        "packets_discarded_no_receiver_size", GetTestCaseName(peer_name),
+        stats.endpoints_stats.overall_incoming_stats
+            .packets_discarded_no_receiver_size,
+        Unit::kBytes, ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+  if (!stats.endpoints_stats.sent_packets_queue_wait_time_us.IsEmpty()) {
+    metrics_logger_->LogMetric(
+        "sent_packets_queue_wait_time_us", GetTestCaseName(peer_name),
+        stats.endpoints_stats.sent_packets_queue_wait_time_us, Unit::kUnitless,
+        ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+
+  log << "Send statistic:\n"
+      << "  packets: "
+      << stats.endpoints_stats.overall_outgoing_stats.packets_sent << " bytes: "
+      << stats.endpoints_stats.overall_outgoing_stats.bytes_sent.bytes()
+      << " avg_rate (bytes/sec): " << average_send_rate.bytes_per_sec()
+      << " avg_rate (bps): " << average_send_rate.bps() << "\n"
+      << "Send statistic per destination:\n";
+
+  for (const auto& entry :
+       stats.endpoints_stats.outgoing_stats_per_destination) {
+    DataRate source_average_send_rate = entry.second.packets_sent >= 2
+                                            ? entry.second.AverageSendRate()
+                                            : DataRate::Zero();
+    log << "(" << entry.first.ToString() << "):\n"
+        << "  packets: " << entry.second.packets_sent
+        << " bytes: " << entry.second.bytes_sent.bytes()
+        << " avg_rate (bytes/sec): " << source_average_send_rate.bytes_per_sec()
+        << " avg_rate (bps): " << source_average_send_rate.bps() << "\n";
+    if (!entry.second.sent_packets_size.IsEmpty()) {
+      // Per-destination series get "<peer>/<destination ip>" test-case names.
+      metrics_logger_->LogMetric(
+          "sent_packets_size",
+          GetTestCaseName(peer_name + "/" + entry.first.ToString()),
+          entry.second.sent_packets_size, Unit::kBytes,
+          ImprovementDirection::kNeitherIsBetter, metric_metadata);
+    }
+  }
+
+  if (!stats.uplink_stats.packet_transport_time.IsEmpty()) {
+    log << "[Debug stats] packet_transport_time=("
+        << stats.uplink_stats.packet_transport_time.GetAverage() << ", "
+        << stats.uplink_stats.packet_transport_time.GetStandardDeviation()
+        << ")\n";
+    metrics_logger_->LogMetric(
+        "uplink_packet_transport_time", GetTestCaseName(peer_name),
+        stats.uplink_stats.packet_transport_time, Unit::kMilliseconds,
+        ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+  if (!stats.uplink_stats.size_to_packet_transport_time.IsEmpty()) {
+    log << "[Debug stats] size_to_packet_transport_time=("
+        << stats.uplink_stats.size_to_packet_transport_time.GetAverage() << ", "
+        << stats.uplink_stats.size_to_packet_transport_time
+               .GetStandardDeviation()
+        << ")\n";
+    metrics_logger_->LogMetric(
+        "uplink_size_to_packet_transport_time", GetTestCaseName(peer_name),
+        stats.uplink_stats.size_to_packet_transport_time, Unit::kUnitless,
+        ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+
+  log << "Receive statistic:\n"
+      << "  packets: "
+      << stats.endpoints_stats.overall_incoming_stats.packets_received
+      << " bytes: "
+      << stats.endpoints_stats.overall_incoming_stats.bytes_received.bytes()
+      << " avg_rate (bytes/sec): " << average_receive_rate.bytes_per_sec()
+      << " avg_rate (bps): " << average_receive_rate.bps() << "\n"
+      << "Receive statistic per source:\n";
+
+  for (const auto& entry : stats.endpoints_stats.incoming_stats_per_source) {
+    DataRate source_average_receive_rate =
+        entry.second.packets_received >= 2 ? entry.second.AverageReceiveRate()
+                                           : DataRate::Zero();
+    log << "(" << entry.first.ToString() << "):\n"
+        << "  packets: " << entry.second.packets_received
+        << " bytes: " << entry.second.bytes_received.bytes()
+        << " avg_rate (bytes/sec): "
+        << source_average_receive_rate.bytes_per_sec()
+        << " avg_rate (bps): " << source_average_receive_rate.bps() << "\n";
+    if (!entry.second.received_packets_size.IsEmpty()) {
+      metrics_logger_->LogMetric(
+          "received_packets_size",
+          GetTestCaseName(peer_name + "/" + entry.first.ToString()),
+          entry.second.received_packets_size, Unit::kBytes,
+          ImprovementDirection::kNeitherIsBetter, metric_metadata);
+    }
+    if (!entry.second.packets_discarded_no_receiver_size.IsEmpty()) {
+      metrics_logger_->LogMetric(
+          "packets_discarded_no_receiver_size",
+          GetTestCaseName(peer_name + "/" + entry.first.ToString()),
+          entry.second.packets_discarded_no_receiver_size, Unit::kBytes,
+          ImprovementDirection::kNeitherIsBetter, metric_metadata);
+    }
+  }
+  if (!stats.downlink_stats.packet_transport_time.IsEmpty()) {
+    log << "[Debug stats] packet_transport_time=("
+        << stats.downlink_stats.packet_transport_time.GetAverage() << ", "
+        << stats.downlink_stats.packet_transport_time.GetStandardDeviation()
+        << ")\n";
+    metrics_logger_->LogMetric(
+        "downlink_packet_transport_time", GetTestCaseName(peer_name),
+        stats.downlink_stats.packet_transport_time, Unit::kMilliseconds,
+        ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+  if (!stats.downlink_stats.size_to_packet_transport_time.IsEmpty()) {
+    log << "[Debug stats] size_to_packet_transport_time=("
+        << stats.downlink_stats.size_to_packet_transport_time.GetAverage()
+        << ", "
+        << stats.downlink_stats.size_to_packet_transport_time
+               .GetStandardDeviation()
+        << ")\n";
+    metrics_logger_->LogMetric(
+        "downlink_size_to_packet_transport_time", GetTestCaseName(peer_name),
+        stats.downlink_stats.size_to_packet_transport_time, Unit::kUnitless,
+        ImprovementDirection::kNeitherIsBetter, metric_metadata);
+  }
+
+  RTC_LOG(LS_INFO) << log.str();
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h
new file mode 100644
index 0000000000..60daf40c8c
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_
+#define TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_
+
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/network_emulation/network_emulation_interfaces.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/data_size.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// TODO(titovartem): make this class testable and add tests.
+// TODO(titovartem): make this class testable and add tests.
+// Quality metrics reporter that combines PeerConnection GetStats() snapshots
+// with emulated network layer stats to report per-peer byte/packet counters,
+// average rates and a cross-peer packet-loss estimate.
+class StatsBasedNetworkQualityMetricsReporter
+    : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter {
+ public:
+  // Emulated network layer stats for single peer.
+  struct NetworkLayerStats {
+    EmulatedNetworkStats endpoints_stats;
+    EmulatedNetworkNodeStats uplink_stats;
+    EmulatedNetworkNodeStats downlink_stats;
+    // Names of the peers that received traffic from this peer.
+    std::set<std::string> receivers;
+  };
+
+  // `peer_endpoints` maps a peer name to the emulated endpoints used to
+  // report network layer stability stats and to log network layer metrics.
+  // `network_emulation` and `metrics_logger` must outlive this reporter.
+  StatsBasedNetworkQualityMetricsReporter(
+      std::map<std::string, std::vector<EmulatedEndpoint*>> peer_endpoints,
+      NetworkEmulationManager* network_emulation,
+      test::MetricsLogger* metrics_logger);
+  ~StatsBasedNetworkQualityMetricsReporter() override = default;
+
+  // Registers an extra peer after construction; the overload without
+  // uplink/downlink nodes skips per-link transport-time reporting.
+  void AddPeer(absl::string_view peer_name,
+               std::vector<EmulatedEndpoint*> endpoints);
+  void AddPeer(absl::string_view peer_name,
+               std::vector<EmulatedEndpoint*> endpoints,
+               std::vector<EmulatedNetworkNode*> uplink,
+               std::vector<EmulatedNetworkNode*> downlink);
+
+  // Network stats must be empty when this method will be invoked.
+  void Start(absl::string_view test_case_name,
+             const TrackIdStreamInfoMap* reporter_helper) override;
+  void OnStatsReports(
+      absl::string_view pc_label,
+      const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+  void StopAndReportResults() override;
+
+ private:
+  // Latest per-PeerConnection counter snapshot, keyed by pc label.
+  struct PCStats {
+    // TODO(bugs.webrtc.org/10525): Separate audio and video counters. Depends
+    // on standard stat counters, enabled by field trial
+    // "WebRTC-UseStandardBytesStats".
+    DataSize payload_received = DataSize::Zero();
+    DataSize payload_sent = DataSize::Zero();
+
+    // Total bytes/packets sent/received in all RTCTransport's.
+    DataSize total_received = DataSize::Zero();
+    DataSize total_sent = DataSize::Zero();
+    int64_t packets_received = 0;
+    int64_t packets_sent = 0;
+  };
+
+  // Gathers emulated network stats (endpoint, uplink, downlink) per peer and
+  // derives each sender's receiver set. All public methods are thread safe
+  // via `mutex_`.
+  class NetworkLayerStatsCollector {
+   public:
+    NetworkLayerStatsCollector(
+        std::map<std::string, std::vector<EmulatedEndpoint*>> peer_endpoints,
+        NetworkEmulationManager* network_emulation);
+
+    // Crashes unless all known endpoints report zero traffic.
+    void Start();
+
+    void AddPeer(absl::string_view peer_name,
+                 std::vector<EmulatedEndpoint*> endpoints,
+                 std::vector<EmulatedNetworkNode*> uplink,
+                 std::vector<EmulatedNetworkNode*> downlink);
+
+    // Blocking; waits for async stats callbacks from the emulation layer.
+    std::map<std::string, NetworkLayerStats> GetStats();
+
+   private:
+    Mutex mutex_;
+    std::map<std::string, std::vector<EmulatedEndpoint*>> peer_endpoints_
+        RTC_GUARDED_BY(mutex_);
+    std::map<std::string, std::vector<EmulatedNetworkNode*>> peer_uplinks_
+        RTC_GUARDED_BY(mutex_);
+    std::map<std::string, std::vector<EmulatedNetworkNode*>> peer_downlinks_
+        RTC_GUARDED_BY(mutex_);
+    // Reverse lookup: endpoint IP -> owning peer name.
+    std::map<rtc::IPAddress, std::string> ip_to_peer_ RTC_GUARDED_BY(mutex_);
+    NetworkEmulationManager* const network_emulation_;
+  };
+
+  void ReportStats(const std::string& pc_label,
+                   const PCStats& pc_stats,
+                   const NetworkLayerStats& network_layer_stats,
+                   int64_t packet_loss,
+                   const Timestamp& end_time);
+  std::string GetTestCaseName(absl::string_view network_label) const;
+  void LogNetworkLayerStats(const std::string& peer_name,
+                            const NetworkLayerStats& stats) const;
+
+  NetworkLayerStatsCollector collector_;
+  Clock* const clock_;
+  test::MetricsLogger* const metrics_logger_;
+
+  std::string test_case_name_;
+  // Set in Start(); used to compute average rates at report time.
+  Timestamp start_time_ = Timestamp::MinusInfinity();
+
+  Mutex mutex_;
+  std::map<std::string, PCStats> pc_stats_ RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_STATS_BASED_NETWORK_QUALITY_METRICS_REPORTER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc
new file mode 100644
index 0000000000..be55149482
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h"
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/test/create_network_emulation_manager.h"
+#include "api/test/create_peer_connection_quality_test_frame_generator.h"
+#include "api/test/metrics/metrics_logger.h"
+#include "api/test/metrics/stdout_metrics_exporter.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/peerconnection_quality_test_fixture.h"
+#include "api/units/time_delta.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/pc/e2e/metric_metadata_keys.h"
+#include "test/pc/e2e/peer_connection_quality_test.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::UnorderedElementsAre;
+
+using ::webrtc::test::DefaultMetricsLogger;
+using ::webrtc::test::ImprovementDirection;
+using ::webrtc::test::Metric;
+using ::webrtc::test::Unit;
+using ::webrtc::webrtc_pc_e2e::PeerConfigurer;
+
+// Adds a peer with some audio and video (the client should not care about
+// details about audio and video configs).
+// Adds a peer with some audio and video (the client should not care about
+// details about audio and video configs).
+void AddDefaultAudioVideoPeer(
+    absl::string_view peer_name,
+    absl::string_view audio_stream_label,
+    absl::string_view video_stream_label,
+    const PeerNetworkDependencies& network_dependencies,
+    PeerConnectionE2EQualityTestFixture& fixture) {
+  AudioConfig audio{std::string(audio_stream_label)};
+  // Both streams use the peer name as sync group so they belong together.
+  audio.sync_group = std::string(peer_name);
+  // 320x180 @ 15fps keeps the simulated test cheap.
+  VideoConfig video(std::string(video_stream_label), 320, 180, 15);
+  video.sync_group = std::string(peer_name);
+  auto peer = std::make_unique<PeerConfigurer>(network_dependencies);
+  peer->SetName(peer_name);
+  peer->SetAudioConfig(std::move(audio));
+  peer->AddVideoConfig(std::move(video));
+  peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)});
+  fixture.AddPeer(std::move(peer));
+}
+
+// Returns a copy of the first metric in `metrics` whose name equals `name`,
+// or nullopt when no such metric exists.
+// NOTE(review): "Meetric" looks like a typo for "Metric"; kept as-is to
+// match the existing call sites.
+absl::optional<Metric> FindMeetricByName(absl::string_view name,
+                                         rtc::ArrayView<const Metric> metrics) {
+  absl::optional<Metric> found;
+  for (const Metric& candidate : metrics) {
+    if (candidate.name == name) {
+      found = candidate;
+      break;
+    }
+  }
+  return found;
+}
+
+// End-to-end check that, with debug stats gathering enabled, the reporter
+// produces non-empty uplink/downlink packet-transport-time metric series
+// after a short simulated two-peer call.
+TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) {
+  std::unique_ptr<NetworkEmulationManager> network_emulation =
+      CreateNetworkEmulationManager(TimeMode::kSimulated,
+                                    EmulatedNetworkStatsGatheringMode::kDebug);
+  DefaultMetricsLogger metrics_logger(
+      network_emulation->time_controller()->GetClock());
+  PeerConnectionE2EQualityTest fixture(
+      "test_case", *network_emulation->time_controller(),
+      /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr,
+      &metrics_logger);
+
+  // Two endpoints connected by two 500 kbps links, one per direction.
+  EmulatedEndpoint* alice_endpoint =
+      network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+  EmulatedEndpoint* bob_endpoint =
+      network_emulation->CreateEndpoint(EmulatedEndpointConfig());
+
+  EmulatedNetworkNode* alice_link = network_emulation->CreateEmulatedNode(
+      BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500});
+  network_emulation->CreateRoute(alice_endpoint, {alice_link}, bob_endpoint);
+  EmulatedNetworkNode* bob_link = network_emulation->CreateEmulatedNode(
+      BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500});
+  network_emulation->CreateRoute(bob_endpoint, {bob_link}, alice_endpoint);
+
+  EmulatedNetworkManagerInterface* alice_network =
+      network_emulation->CreateEmulatedNetworkManagerInterface(
+          {alice_endpoint});
+  EmulatedNetworkManagerInterface* bob_network =
+      network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint});
+
+  AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video",
+                           alice_network->network_dependencies(), fixture);
+  AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video",
+                           bob_network->network_dependencies(), fixture);
+
+  // Peers are registered via AddPeer (not the constructor map) so that each
+  // one carries its uplink/downlink nodes for debug stats.
+  auto network_stats_reporter =
+      std::make_unique<StatsBasedNetworkQualityMetricsReporter>(
+          /*peer_endpoints=*/std::map<std::string,
+                                      std::vector<EmulatedEndpoint*>>{},
+          network_emulation.get(), &metrics_logger);
+  network_stats_reporter->AddPeer("alice", alice_network->endpoints(),
+                                  /*uplink=*/{alice_link},
+                                  /*downlink=*/{bob_link});
+  network_stats_reporter->AddPeer("bob", bob_network->endpoints(),
+                                  /*uplink=*/{bob_link},
+                                  /*downlink=*/{alice_link});
+  fixture.AddQualityMetricsReporter(std::move(network_stats_reporter));
+
+  fixture.Run(RunParams(TimeDelta::Seconds(4)));
+
+  // All four debug series must exist and contain at least one sample.
+  std::vector<Metric> metrics = metrics_logger.GetCollectedMetrics();
+  absl::optional<Metric> uplink_packet_transport_time =
+      FindMeetricByName("uplink_packet_transport_time", metrics);
+  ASSERT_TRUE(uplink_packet_transport_time.has_value());
+  ASSERT_FALSE(uplink_packet_transport_time->time_series.samples.empty());
+  absl::optional<Metric> uplink_size_to_packet_transport_time =
+      FindMeetricByName("uplink_size_to_packet_transport_time", metrics);
+  ASSERT_TRUE(uplink_size_to_packet_transport_time.has_value());
+  ASSERT_FALSE(
+      uplink_size_to_packet_transport_time->time_series.samples.empty());
+  absl::optional<Metric> downlink_packet_transport_time =
+      FindMeetricByName("downlink_packet_transport_time", metrics);
+  ASSERT_TRUE(downlink_packet_transport_time.has_value());
+  ASSERT_FALSE(downlink_packet_transport_time->time_series.samples.empty());
+  absl::optional<Metric> downlink_size_to_packet_transport_time =
+      FindMeetricByName("downlink_size_to_packet_transport_time", metrics);
+  ASSERT_TRUE(downlink_size_to_packet_transport_time.has_value());
+  ASSERT_FALSE(
+      downlink_size_to_packet_transport_time->time_series.samples.empty());
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_poller.cc b/third_party/libwebrtc/test/pc/e2e/stats_poller.cc
new file mode 100644
index 0000000000..c04805fb20
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_poller.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/stats_poller.h"
+
+#include <utility>
+
+#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Asynchronously requests a fresh stats report from the observed peer.
+// The report is delivered back to this object via OnStatsDelivered().
+void InternalStatsObserver::PollStats() {
+  peer_->GetStats(this);
+}
+
+// Fans the delivered report out to every subscribed observer, tagging it
+// with this poller's peer connection label.
+void InternalStatsObserver::OnStatsDelivered(
+    const rtc::scoped_refptr<const RTCStatsReport>& report) {
+  for (auto* observer : observers_) {
+    observer->OnStatsReports(pc_label_, report);
+  }
+}
+
+// Creates one InternalStatsObserver per peer; all observers are notified on
+// every poll. NOTE(review): the lock is not strictly required during
+// construction (no other thread can see `this` yet) but satisfies the
+// RTC_GUARDED_BY annotation on `pollers_`.
+StatsPoller::StatsPoller(std::vector<StatsObserverInterface*> observers,
+                         std::map<std::string, StatsProvider*> peers)
+    : observers_(std::move(observers)) {
+  webrtc::MutexLock lock(&mutex_);
+  for (auto& peer : peers) {
+    pollers_.push_back(rtc::make_ref_counted<InternalStatsObserver>(
+        peer.first, peer.second, observers_));
+  }
+}
+
+// Overload taking TestPeer pointers; TestPeer implements StatsProvider, so
+// each peer is registered through the same InternalStatsObserver machinery.
+StatsPoller::StatsPoller(std::vector<StatsObserverInterface*> observers,
+                         std::map<std::string, TestPeer*> peers)
+    : observers_(std::move(observers)) {
+  webrtc::MutexLock lock(&mutex_);
+  for (auto& peer : peers) {
+    pollers_.push_back(rtc::make_ref_counted<InternalStatsObserver>(
+        peer.first, peer.second, observers_));
+  }
+}
+
+// Triggers an async stats poll for every registered peer. Observers are
+// notified later, when each peer's report is delivered.
+void StatsPoller::PollStatsAndNotifyObservers() {
+  webrtc::MutexLock lock(&mutex_);
+  for (auto& poller : pollers_) {
+    poller->PollStats();
+  }
+}
+
+// Adds a peer to the polling set after construction, e.g. for a participant
+// joining an already running call.
+void StatsPoller::RegisterParticipantInCall(absl::string_view peer_name,
+                                            StatsProvider* peer) {
+  webrtc::MutexLock lock(&mutex_);
+  pollers_.push_back(rtc::make_ref_counted<InternalStatsObserver>(
+      peer_name, peer, observers_));
+}
+
+// Removes the first poller whose label matches `peer_name`. Returns true on
+// removal, false when no such participant is registered.
+bool StatsPoller::UnregisterParticipantInCall(absl::string_view peer_name) {
+  webrtc::MutexLock lock(&mutex_);
+  for (auto it = pollers_.begin(); it != pollers_.end(); ++it) {
+    if ((*it)->pc_label() == peer_name) {
+      // Erase-and-return: the iterator is invalidated, so we must not
+      // continue the loop after erasing.
+      pollers_.erase(it);
+      return true;
+    }
+  }
+  return false;
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_poller.h b/third_party/libwebrtc/test/pc/e2e/stats_poller.h
new file mode 100644
index 0000000000..3576f1bf05
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_poller.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_STATS_POLLER_H_
+#define TEST_PC_E2E_STATS_POLLER_H_
+
+#include <map>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "api/test/stats_observer_interface.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread_annotations.h"
+#include "test/pc/e2e/stats_provider.h"
+#include "test/pc/e2e/test_peer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Helper class that will notify all the webrtc::test::StatsObserverInterface
+// objects subscribed. One instance exists per observed peer; it acts as the
+// RTCStatsCollectorCallback handed to StatsProvider::GetStats().
+class InternalStatsObserver : public RTCStatsCollectorCallback {
+ public:
+  InternalStatsObserver(absl::string_view pc_label,
+                        StatsProvider* peer,
+                        std::vector<StatsObserverInterface*> observers)
+      : pc_label_(pc_label), peer_(peer), observers_(std::move(observers)) {}
+
+  // Label identifying the observed peer connection (e.g. "alice").
+  std::string pc_label() const { return pc_label_; }
+
+  // Kicks off an async GetStats request on the observed peer.
+  void PollStats();
+
+  void OnStatsDelivered(
+      const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+
+ private:
+  std::string pc_label_;
+  StatsProvider* peer_;  // Not owned; must outlive this observer.
+  std::vector<StatsObserverInterface*> observers_;  // Not owned.
+};
+
+// Helper class to invoke GetStats on a PeerConnection by passing a
+// webrtc::StatsObserver that will notify all the
+// webrtc::test::StatsObserverInterface subscribed.
+// NOTE(review): absl::string_view is used in this header but
+// "absl/strings/string_view.h" is not directly included — relies on a
+// transitive include; consider adding it explicitly (IWYU).
+class StatsPoller {
+ public:
+  StatsPoller(std::vector<StatsObserverInterface*> observers,
+              std::map<std::string, StatsProvider*> peers_to_observe);
+  StatsPoller(std::vector<StatsObserverInterface*> observers,
+              std::map<std::string, TestPeer*> peers_to_observe);
+
+  // Asynchronously polls stats for every registered peer; observers receive
+  // the reports when delivery completes.
+  void PollStatsAndNotifyObservers();
+
+  // Registers an additional participant to be polled from now on.
+  void RegisterParticipantInCall(absl::string_view peer_name,
+                                 StatsProvider* peer);
+  // Unregister participant from stats poller. Returns true if participant was
+  // removed and false if participant wasn't found.
+  bool UnregisterParticipantInCall(absl::string_view peer_name);
+
+ private:
+  const std::vector<StatsObserverInterface*> observers_;
+  webrtc::Mutex mutex_;
+  // One poller per registered participant; guarded because participants can
+  // be (un)registered while a call is running.
+  std::vector<rtc::scoped_refptr<InternalStatsObserver>> pollers_
+      RTC_GUARDED_BY(mutex_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_STATS_POLLER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_poller_test.cc b/third_party/libwebrtc/test/pc/e2e/stats_poller_test.cc
new file mode 100644
index 0000000000..02a323127b
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_poller_test.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/stats_poller.h"
+
+#include "api/stats/rtc_stats_collector_callback.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using ::testing::Eq;
+
+// Fake StatsProvider that only counts how many times GetStats() was invoked.
+// It never invokes the callback, so delivery paths are not exercised.
+class TestStatsProvider : public StatsProvider {
+ public:
+  ~TestStatsProvider() override = default;
+
+  void GetStats(RTCStatsCollectorCallback* callback) override {
+    stats_collections_count_++;
+  }
+
+  // Number of GetStats() calls observed so far.
+  int stats_collections_count() const { return stats_collections_count_; }
+
+ private:
+  int stats_collections_count_ = 0;
+};
+
+// gMock observer; tests here only need a sink for OnStatsReports, no
+// expectations are set on it.
+class MockStatsObserver : public StatsObserverInterface {
+ public:
+  ~MockStatsObserver() override = default;
+
+  MOCK_METHOD(void,
+              OnStatsReports,
+              (absl::string_view pc_label,
+               const rtc::scoped_refptr<const RTCStatsReport>& report));
+};
+
+// A peer supplied via the constructor map must stop being polled once it is
+// unregistered: bob's counter stays at 1 after removal while alice's grows.
+TEST(StatsPollerTest, UnregisterParticipantAddedInCtor) {
+  TestStatsProvider alice;
+  TestStatsProvider bob;
+
+  MockStatsObserver stats_observer;
+
+  StatsPoller poller(/*observers=*/{&stats_observer},
+                     /*peers_to_observe=*/{{"alice", &alice}, {"bob", &bob}});
+  poller.PollStatsAndNotifyObservers();
+
+  EXPECT_THAT(alice.stats_collections_count(), Eq(1));
+  EXPECT_THAT(bob.stats_collections_count(), Eq(1));
+
+  poller.UnregisterParticipantInCall("bob");
+  poller.PollStatsAndNotifyObservers();
+
+  EXPECT_THAT(alice.stats_collections_count(), Eq(2));
+  EXPECT_THAT(bob.stats_collections_count(), Eq(1));
+}
+
+// Same as above, but bob joins via RegisterParticipantInCall() after
+// construction; unregistering must work identically for late joiners.
+TEST(StatsPollerTest, UnregisterParticipantRegisteredInCall) {
+  TestStatsProvider alice;
+  TestStatsProvider bob;
+
+  MockStatsObserver stats_observer;
+
+  StatsPoller poller(/*observers=*/{&stats_observer},
+                     /*peers_to_observe=*/{{"alice", &alice}});
+  poller.RegisterParticipantInCall("bob", &bob);
+  poller.PollStatsAndNotifyObservers();
+
+  EXPECT_THAT(alice.stats_collections_count(), Eq(1));
+  EXPECT_THAT(bob.stats_collections_count(), Eq(1));
+
+  poller.UnregisterParticipantInCall("bob");
+  poller.PollStatsAndNotifyObservers();
+
+  EXPECT_THAT(alice.stats_collections_count(), Eq(2));
+  EXPECT_THAT(bob.stats_collections_count(), Eq(1));
+}
+
+} // namespace
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/stats_provider.h b/third_party/libwebrtc/test/pc/e2e/stats_provider.h
new file mode 100644
index 0000000000..eef62d779c
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/stats_provider.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_STATS_PROVIDER_H_
+#define TEST_PC_E2E_STATS_PROVIDER_H_
+
+#include "api/stats/rtc_stats_collector_callback.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Interface for any entity that can asynchronously produce an RTCStatsReport
+// (e.g. a TestPeer wrapping a PeerConnection). The report is delivered via
+// the provided callback.
+class StatsProvider {
+ public:
+  virtual ~StatsProvider() = default;
+
+  // Requests stats collection; `callback` receives the report when ready.
+  virtual void GetStats(RTCStatsCollectorCallback* callback) = 0;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_STATS_PROVIDER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/test_activities_executor.cc b/third_party/libwebrtc/test/pc/e2e/test_activities_executor.cc
new file mode 100644
index 0000000000..7bcf7dd6c3
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_activities_executor.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/pc/e2e/test_activities_executor.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/task_queue_for_test.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Marks the test as started and flushes every activity queued before Start()
+// onto the task queue. `task_queue` is not owned and must outlive the
+// executor (until Stop() is called).
+void TestActivitiesExecutor::Start(TaskQueueBase* task_queue) {
+  RTC_DCHECK(task_queue);
+  task_queue_ = task_queue;
+  MutexLock lock(&lock_);
+  start_time_ = Now();
+  while (!scheduled_activities_.empty()) {
+    PostActivity(std::move(scheduled_activities_.front()));
+    scheduled_activities_.pop();
+  }
+}
+
+// Stops all repeating activities. Idempotent: subsequent calls (or a call
+// before Start()) are no-ops. Stopping is done on the task queue itself,
+// synchronously, so no activity runs after Stop() returns.
+void TestActivitiesExecutor::Stop() {
+  if (task_queue_ == nullptr) {
+    // Already stopped or not started.
+    return;
+  }
+  SendTask(task_queue_, [this]() {
+    MutexLock lock(&lock_);
+    for (auto& handle : repeating_task_handles_) {
+      handle.Stop();
+    }
+  });
+  task_queue_ = nullptr;
+}
+
+// Schedules `func` to run `initial_delay_since_start` after Start(); if
+// `interval` is set the activity repeats with that period. Activities added
+// before Start() are queued (start_time_ is still minus-infinity) and posted
+// when Start() runs.
+void TestActivitiesExecutor::ScheduleActivity(
+    TimeDelta initial_delay_since_start,
+    absl::optional<TimeDelta> interval,
+    std::function<void(TimeDelta)> func) {
+  RTC_CHECK(initial_delay_since_start.IsFinite() &&
+            initial_delay_since_start >= TimeDelta::Zero());
+  RTC_CHECK(!interval ||
+            (interval->IsFinite() && *interval > TimeDelta::Zero()));
+  MutexLock lock(&lock_);
+  ScheduledActivity activity(initial_delay_since_start, interval, func);
+  if (start_time_.IsInfinite()) {
+    scheduled_activities_.push(std::move(activity));
+  } else {
+    PostActivity(std::move(activity));
+  }
+}
+
+// Posts `activity` on `task_queue_`, compensating its delay for time already
+// elapsed since the test started. Requires `lock_` to be held (start_time_ is
+// read under the caller's lock).
+void TestActivitiesExecutor::PostActivity(ScheduledActivity activity) {
+  // Because start_time_ will never change at this point, copy it to a local
+  // variable to capture it in the lambda without needing to hold a lock.
+  Timestamp start_time = start_time_;
+
+  TimeDelta remaining_delay =
+      activity.initial_delay_since_start == TimeDelta::Zero()
+          ? TimeDelta::Zero()
+          : activity.initial_delay_since_start - (Now() - start_time);
+  if (remaining_delay < TimeDelta::Zero()) {
+    // The scheduled point is already in the past (activity added after
+    // Start()); run it as soon as possible instead of dropping it.
+    RTC_LOG(LS_WARNING) << "Executing late task immediately, late by="
+                        << ToString(remaining_delay.Abs());
+    remaining_delay = TimeDelta::Zero();
+  }
+
+  if (activity.interval) {
+    // Repeating activity: the lambda's return value is the next period.
+    if (remaining_delay == TimeDelta::Zero()) {
+      repeating_task_handles_.push_back(RepeatingTaskHandle::Start(
+          task_queue_, [activity, start_time, this]() {
+            activity.func(Now() - start_time);
+            return *activity.interval;
+          }));
+      return;
+    }
+    repeating_task_handles_.push_back(RepeatingTaskHandle::DelayedStart(
+        task_queue_, remaining_delay, [activity, start_time, this]() {
+          activity.func(Now() - start_time);
+          return *activity.interval;
+        }));
+    return;
+  }
+
+  // One-shot activity.
+  if (remaining_delay == TimeDelta::Zero()) {
+    task_queue_->PostTask(
+        [activity, start_time, this]() { activity.func(Now() - start_time); });
+    return;
+  }
+
+  task_queue_->PostDelayedTask(
+      [activity, start_time, this]() { activity.func(Now() - start_time); },
+      remaining_delay);
+}
+
+// Current time according to the injected clock (real or simulated).
+Timestamp TestActivitiesExecutor::Now() const {
+  return clock_->CurrentTime();
+}
+
+// Simple value holder for a pending activity; `func` is moved in to avoid
+// copying the std::function state.
+TestActivitiesExecutor::ScheduledActivity::ScheduledActivity(
+    TimeDelta initial_delay_since_start,
+    absl::optional<TimeDelta> interval,
+    std::function<void(TimeDelta)> func)
+    : initial_delay_since_start(initial_delay_since_start),
+      interval(interval),
+      func(std::move(func)) {}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/test_activities_executor.h b/third_party/libwebrtc/test/pc/e2e/test_activities_executor.h
new file mode 100644
index 0000000000..2469ac7f36
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_activities_executor.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_
+#define TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_
+
+#include <functional>
+#include <queue>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Executes scheduled one-shot and repeating activities on a task queue,
+// measuring delays relative to the moment Start() was invoked.
+class TestActivitiesExecutor {
+ public:
+  explicit TestActivitiesExecutor(Clock* clock) : clock_(clock) {}
+  ~TestActivitiesExecutor() { Stop(); }
+
+  // Starts scheduled activities according to their schedule. All activities
+  // that will be scheduled after Start(...) was invoked will be executed
+  // immediately according to their schedule.
+  void Start(TaskQueueForTest* task_queue) { Start(task_queue->Get()); }
+  void Start(TaskQueueBase* task_queue);
+  void Stop();
+
+  // Schedule activity to be executed. If test isn't started yet, then activity
+  // will be executed according to its schedule after Start() will be invoked.
+  // If test is started, then it will be executed immediately according to its
+  // schedule.
+  void ScheduleActivity(TimeDelta initial_delay_since_start,
+                        absl::optional<TimeDelta> interval,
+                        std::function<void(TimeDelta)> func);
+
+ private:
+  struct ScheduledActivity {
+    ScheduledActivity(TimeDelta initial_delay_since_start,
+                      absl::optional<TimeDelta> interval,
+                      std::function<void(TimeDelta)> func);
+
+    TimeDelta initial_delay_since_start;
+    // If set, the activity repeats with this period; otherwise it is one-shot.
+    absl::optional<TimeDelta> interval;
+    // Receives the time elapsed since Start() at the moment of execution.
+    std::function<void(TimeDelta)> func;
+  };
+
+  void PostActivity(ScheduledActivity activity)
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
+  Timestamp Now() const;
+
+  Clock* const clock_;
+
+  // Not owned. Set by Start(), cleared by Stop(). Default-initialized to
+  // nullptr so that destroying a never-started executor (the destructor calls
+  // Stop(), which compares this pointer against nullptr) is well-defined
+  // instead of reading an indeterminate value.
+  TaskQueueBase* task_queue_ = nullptr;
+
+  Mutex lock_;
+  // Time when test was started. Minus infinity means that it wasn't started
+  // yet.
+  Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
+  // Queue of activities that were added before test was started.
+  // Activities from this queue will be posted on the `task_queue_` after test
+  // will be set up and then this queue will be unused.
+  std::queue<ScheduledActivity> scheduled_activities_ RTC_GUARDED_BY(lock_);
+  // List of task handles for activities, that are posted on `task_queue_` as
+  // repeated during the call.
+  std::vector<RepeatingTaskHandle> repeating_task_handles_
+      RTC_GUARDED_BY(lock_);
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/test_peer.cc b/third_party/libwebrtc/test/pc/e2e/test_peer.cc
new file mode 100644
index 0000000000..b3a9e1c164
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_peer.cc
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/test_peer.h"
+
+#include <string>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/scoped_refptr.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+// Synchronous capture of the SetRemoteDescription result: records whether the
+// completion callback fired and with which error. Used to assert that the
+// operation completes inline on the signaling thread.
+class SetRemoteDescriptionCallback
+    : public webrtc::SetRemoteDescriptionObserverInterface {
+ public:
+  void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override {
+    is_called_ = true;
+    error_ = error;
+  }
+
+  // True once OnSetRemoteDescriptionComplete() has run.
+  bool is_called() const { return is_called_; }
+
+  webrtc::RTCError error() const { return error_; }
+
+ private:
+  bool is_called_ = false;
+  webrtc::RTCError error_;
+};
+
+} // namespace
+
+// Returns a copy of the mutable parameters; copied under the lock because
+// they can be changed concurrently via Add/Remove/SetVideo* methods.
+ConfigurableParams TestPeer::configurable_params() const {
+  MutexLock lock(&mutex_);
+  return configurable_params_;
+}
+
+// Appends a new video stream configuration to this peer's mutable params.
+void TestPeer::AddVideoConfig(VideoConfig config) {
+  MutexLock lock(&mutex_);
+  configurable_params_.video_configs.push_back(std::move(config));
+}
+
+// Removes the video config whose stream label matches `stream_label`.
+// Crashes (RTC_CHECK) if no matching config exists.
+void TestPeer::RemoveVideoConfig(absl::string_view stream_label) {
+  MutexLock lock(&mutex_);
+  bool config_removed = false;
+  for (auto it = configurable_params_.video_configs.begin();
+       it != configurable_params_.video_configs.end(); ++it) {
+    // stream_label is dereferenced here — presumably an optional that is
+    // always populated for registered configs; TODO(review): confirm.
+    if (*it->stream_label == stream_label) {
+      configurable_params_.video_configs.erase(it);
+      config_removed = true;
+      break;
+    }
+  }
+  RTC_CHECK(config_removed) << *params_.name << ": No video config with label ["
+                            << stream_label << "] was found";
+}
+
+// Replaces this peer's video subscription (which remote streams it receives).
+void TestPeer::SetVideoSubscription(VideoSubscription subscription) {
+  MutexLock lock(&mutex_);
+  configurable_params_.video_subscription = std::move(subscription);
+}
+
+// StatsProvider implementation: hops to the signaling thread to call
+// PeerConnection::GetStats. The SafeTask guard prevents execution after
+// Close() has marked the safety flag as not alive.
+void TestPeer::GetStats(RTCStatsCollectorCallback* callback) {
+  pc()->signaling_thread()->PostTask(
+      SafeTask(signaling_thread_task_safety_,
+               [this, callback]() { pc()->GetStats(callback); }));
+}
+
+// Applies a remote SDP description. Returns true on success; on failure
+// returns false and, if `error_out` is non-null, fills it with the error
+// message.
+bool TestPeer::SetRemoteDescription(
+    std::unique_ptr<SessionDescriptionInterface> desc,
+    std::string* error_out) {
+  RTC_CHECK(wrapper_) << "TestPeer is already closed";
+
+  auto observer = rtc::make_ref_counted<SetRemoteDescriptionCallback>();
+  // We're assuming (and asserting) that the PeerConnection implementation of
+  // SetRemoteDescription is synchronous when called on the signaling thread.
+  pc()->SetRemoteDescription(std::move(desc), observer);
+  RTC_CHECK(observer->is_called());
+  if (!observer->error().ok()) {
+    RTC_LOG(LS_ERROR) << *params_.name << ": Failed to set remote description: "
+                      << observer->error().message();
+    if (error_out) {
+      *error_out = observer->error().message();
+    }
+  }
+  return observer->error().ok();
+}
+
+// Adds each candidate to the peer connection. Candidates that are accepted
+// are retained (keeping them alive for the connection's lifetime); failures
+// are logged. Returns false if any candidate failed, but keeps processing
+// the rest (best-effort).
+bool TestPeer::AddIceCandidates(
+    std::vector<std::unique_ptr<IceCandidateInterface>> candidates) {
+  RTC_CHECK(wrapper_) << "TestPeer is already closed";
+  bool success = true;
+  for (auto& candidate : candidates) {
+    if (!pc()->AddIceCandidate(candidate.get())) {
+      std::string candidate_str;
+      bool res = candidate->ToString(&candidate_str);
+      RTC_CHECK(res);
+      RTC_LOG(LS_ERROR) << "Failed to add ICE candidate, candidate_str="
+                        << candidate_str;
+      success = false;
+    } else {
+      remote_ice_candidates_.push_back(std::move(candidate));
+    }
+  }
+  return success;
+}
+
+// Closes the peer connection and releases all owned resources. Order matters:
+// the safety flag is invalidated first so pending signaling-thread tasks do
+// not touch freed state, then the PC is closed before its wrapper and the
+// worker thread are released.
+void TestPeer::Close() {
+  signaling_thread_task_safety_->SetNotAlive();
+  wrapper_->pc()->Close();
+  remote_ice_candidates_.clear();
+  audio_processing_ = nullptr;
+  video_sources_.clear();
+  wrapper_ = nullptr;
+  worker_thread_ = nullptr;
+}
+
+// Constructs the peer, taking ownership of the PC wrapper pieces, the video
+// sources and (optionally) the worker thread. A detached safety flag is
+// created for tasks later posted to the signaling thread.
+TestPeer::TestPeer(
+    rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
+    rtc::scoped_refptr<PeerConnectionInterface> pc,
+    std::unique_ptr<MockPeerConnectionObserver> observer,
+    Params params,
+    ConfigurableParams configurable_params,
+    std::vector<PeerConfigurer::VideoSource> video_sources,
+    rtc::scoped_refptr<AudioProcessing> audio_processing,
+    std::unique_ptr<rtc::Thread> worker_thread)
+    : params_(std::move(params)),
+      configurable_params_(std::move(configurable_params)),
+      worker_thread_(std::move(worker_thread)),
+      wrapper_(std::make_unique<PeerConnectionWrapper>(std::move(pc_factory),
+                                                       std::move(pc),
+                                                       std::move(observer))),
+      video_sources_(std::move(video_sources)),
+      audio_processing_(audio_processing) {
+  signaling_thread_task_safety_ = PendingTaskSafetyFlag::CreateDetached();
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/test_peer.h b/third_party/libwebrtc/test/pc/e2e/test_peer.h
new file mode 100644
index 0000000000..1088871817
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_peer.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_TEST_PEER_H_
+#define TEST_PC_E2E_TEST_PEER_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/function_view.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "api/set_remote_description_observer_interface.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "pc/peer_connection_wrapper.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/pc/e2e/stats_provider.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+// Describes a single participant in the call. Wraps a PeerConnection together
+// with its immutable Params and lock-protected ConfigurableParams. Most
+// accessors RTC_CHECK that Close() has not been called yet.
+class TestPeer final : public StatsProvider {
+ public:
+  ~TestPeer() override = default;
+
+  // Immutable parameters fixed at construction time.
+  const Params& params() const { return params_; }
+
+  // Returns a copy of the mutable parameters (thread-safe).
+  ConfigurableParams configurable_params() const;
+  void AddVideoConfig(VideoConfig config);
+  // Removes video config with specified name. Crashes if the config with
+  // specified name isn't found.
+  void RemoveVideoConfig(absl::string_view stream_label);
+  void SetVideoSubscription(VideoSubscription subscription);
+
+  // StatsProvider implementation; asynchronous, runs on the signaling thread.
+  void GetStats(RTCStatsCollectorCallback* callback) override;
+
+  // Transfers ownership of the i-th video source to the caller.
+  PeerConfigurer::VideoSource ReleaseVideoSource(size_t i) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return std::move(video_sources_[i]);
+  }
+
+  PeerConnectionFactoryInterface* pc_factory() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->pc_factory();
+  }
+  PeerConnectionInterface* pc() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->pc();
+  }
+  MockPeerConnectionObserver* observer() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->observer();
+  }
+
+  // Tell underlying `PeerConnection` to create an Offer.
+  // `observer` will be invoked on the signaling thread when offer is created.
+  void CreateOffer(
+      rtc::scoped_refptr<CreateSessionDescriptionObserver> observer) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    pc()->CreateOffer(observer.get(), params_.rtc_offer_answer_options);
+  }
+  // Synchronous variant: returns the created offer directly.
+  std::unique_ptr<SessionDescriptionInterface> CreateOffer() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->CreateOffer(params_.rtc_offer_answer_options);
+  }
+
+  std::unique_ptr<SessionDescriptionInterface> CreateAnswer() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->CreateAnswer();
+  }
+
+  bool SetLocalDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+                           std::string* error_out = nullptr) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->SetLocalDescription(std::move(desc), error_out);
+  }
+
+  // `error_out` will be set only if returned value is false.
+  bool SetRemoteDescription(std::unique_ptr<SessionDescriptionInterface> desc,
+                            std::string* error_out = nullptr);
+
+  rtc::scoped_refptr<RtpTransceiverInterface> AddTransceiver(
+      cricket::MediaType media_type,
+      const RtpTransceiverInit& init) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->AddTransceiver(media_type, init);
+  }
+
+  rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+      rtc::scoped_refptr<MediaStreamTrackInterface> track,
+      const std::vector<std::string>& stream_ids = {}) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->AddTrack(track, stream_ids);
+  }
+
+  rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+      const std::string& label) {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->CreateDataChannel(label);
+  }
+
+  PeerConnectionInterface::SignalingState signaling_state() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->signaling_state();
+  }
+
+  bool IsIceGatheringDone() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->IsIceGatheringDone();
+  }
+
+  bool IsIceConnected() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->IsIceConnected();
+  }
+
+  // Synchronous stats snapshot via the wrapper (contrast with the async
+  // StatsProvider::GetStats overload above).
+  rtc::scoped_refptr<const RTCStatsReport> GetStats() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    return wrapper_->GetStats();
+  }
+
+  // Stops AEC dump recording, if audio processing is present.
+  void DetachAecDump() {
+    RTC_CHECK(wrapper_) << "TestPeer is already closed";
+    if (audio_processing_) {
+      audio_processing_->DetachAecDump();
+    }
+  }
+
+  // Adds provided `candidates` to the owned peer connection.
+  bool AddIceCandidates(
+      std::vector<std::unique_ptr<IceCandidateInterface>> candidates);
+
+  // Closes underlying peer connection and destroys all related objects freeing
+  // up related resources.
+  void Close();
+
+ protected:
+  friend class TestPeerFactory;
+  TestPeer(rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory,
+           rtc::scoped_refptr<PeerConnectionInterface> pc,
+           std::unique_ptr<MockPeerConnectionObserver> observer,
+           Params params,
+           ConfigurableParams configurable_params,
+           std::vector<PeerConfigurer::VideoSource> video_sources,
+           rtc::scoped_refptr<AudioProcessing> audio_processing,
+           std::unique_ptr<rtc::Thread> worker_thread);
+
+ private:
+  const Params params_;
+
+  mutable Mutex mutex_;
+  ConfigurableParams configurable_params_ RTC_GUARDED_BY(mutex_);
+
+  // Safety flag to protect all tasks posted on the signaling thread to not be
+  // executed after `wrapper_` object is destructed.
+  rtc::scoped_refptr<PendingTaskSafetyFlag> signaling_thread_task_safety_ =
+      nullptr;
+
+  // Keeps ownership of worker thread. It has to be destroyed after `wrapper_`.
+  // `worker_thread_`can be null if the Peer use only one thread as both the
+  // worker thread and network thread.
+  std::unique_ptr<rtc::Thread> worker_thread_;
+  std::unique_ptr<PeerConnectionWrapper> wrapper_;
+  std::vector<PeerConfigurer::VideoSource> video_sources_;
+  rtc::scoped_refptr<AudioProcessing> audio_processing_;
+
+  // Remote candidates successfully added via AddIceCandidates(); kept alive
+  // until Close().
+  std::vector<std::unique_ptr<IceCandidateInterface>> remote_ice_candidates_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_TEST_PEER_H_
diff --git a/third_party/libwebrtc/test/pc/e2e/test_peer_factory.cc b/third_party/libwebrtc/test/pc/e2e/test_peer_factory.cc
new file mode 100644
index 0000000000..7fc12f2c11
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_peer_factory.cc
@@ -0,0 +1,374 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/pc/e2e/test_peer_factory.h"
+
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "absl/strings/string_view.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/create_time_controller.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/time_controller.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "modules/audio_processing/aec_dump/aec_dump_factory.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "rtc_base/thread.h"
+#include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h"
+#include "test/pc/e2e/echo/echo_emulation.h"
+#include "test/testsupport/copy_to_file_audio_capturer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+namespace {
+
+using EmulatedSFUConfigMap =
+ ::webrtc::webrtc_pc_e2e::QualityAnalyzingVideoEncoder::EmulatedSFUConfigMap;
+
+constexpr int16_t kGeneratedAudioMaxAmplitude = 32000;
+constexpr int kDefaultSamplingFrequencyInHz = 48000;
+
+// Sets mandatory entities in injectable components like `pcf_dependencies`
+// and `pc_dependencies` if they are omitted. Also sets up required
+// dependencies that won't be specially provided by the factory and will just
+// be transferred to the peer connection creation code.
+void SetMandatoryEntities(InjectableComponents* components,
+ TimeController& time_controller) {
+ RTC_DCHECK(components->pcf_dependencies);
+ RTC_DCHECK(components->pc_dependencies);
+
+ // Setup required peer connection factory dependencies.
+ if (components->pcf_dependencies->task_queue_factory == nullptr) {
+ components->pcf_dependencies->task_queue_factory =
+ time_controller.CreateTaskQueueFactory();
+ }
+ if (components->pcf_dependencies->call_factory == nullptr) {
+ components->pcf_dependencies->call_factory =
+ CreateTimeControllerBasedCallFactory(&time_controller);
+ }
+ if (components->pcf_dependencies->event_log_factory == nullptr) {
+ components->pcf_dependencies->event_log_factory =
+ std::make_unique<RtcEventLogFactory>(
+ components->pcf_dependencies->task_queue_factory.get());
+ }
+ if (!components->pcf_dependencies->trials) {
+ components->pcf_dependencies->trials =
+ std::make_unique<FieldTrialBasedConfig>();
+ }
+}
+
+// Returns mapping from stream label to optional spatial index.
+// If we have stream label "Foo" and the mapping contains
+// 1. `absl::nullopt`, then all simulcast/SVC streams are required
+// 2. a concrete value, then that particular simulcast/SVC stream has to be
+//    analyzed.
+EmulatedSFUConfigMap CalculateRequiredSpatialIndexPerStream(
+ const std::vector<VideoConfig>& video_configs) {
+ EmulatedSFUConfigMap result;
+ for (auto& video_config : video_configs) {
+ // Stream label should be set by fixture implementation here.
+ RTC_DCHECK(video_config.stream_label);
+ bool res = result
+ .insert({*video_config.stream_label,
+ video_config.emulated_sfu_config})
+ .second;
+ RTC_DCHECK(res) << "Duplicate video_config.stream_label="
+ << *video_config.stream_label;
+ }
+ return result;
+}
+
+std::unique_ptr<TestAudioDeviceModule::Renderer> CreateAudioRenderer(
+ const absl::optional<RemotePeerAudioConfig>& config) {
+ if (!config) {
+ // Return default renderer because we always require some renderer.
+ return TestAudioDeviceModule::CreateDiscardRenderer(
+ kDefaultSamplingFrequencyInHz);
+ }
+ if (config->output_file_name) {
+ return TestAudioDeviceModule::CreateBoundedWavFileWriter(
+ config->output_file_name.value(), config->sampling_frequency_in_hz);
+ }
+ return TestAudioDeviceModule::CreateDiscardRenderer(
+ config->sampling_frequency_in_hz);
+}
+
+std::unique_ptr<TestAudioDeviceModule::Capturer> CreateAudioCapturer(
+ const absl::optional<AudioConfig>& audio_config) {
+ if (!audio_config) {
+    // If we have no audio config we still need to provide some audio device.
+    // In such a case use a generated capturer. Even though audio is provided
+    // here, the test media setup won't add an audio stream to the peer
+    // connection.
+ return TestAudioDeviceModule::CreatePulsedNoiseCapturer(
+ kGeneratedAudioMaxAmplitude, kDefaultSamplingFrequencyInHz);
+ }
+
+ switch (audio_config->mode) {
+ case AudioConfig::Mode::kGenerated:
+ return TestAudioDeviceModule::CreatePulsedNoiseCapturer(
+ kGeneratedAudioMaxAmplitude, audio_config->sampling_frequency_in_hz);
+ case AudioConfig::Mode::kFile:
+ RTC_DCHECK(audio_config->input_file_name);
+ return TestAudioDeviceModule::CreateWavFileReader(
+ audio_config->input_file_name.value(), /*repeat=*/true);
+ }
+}
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
+ absl::optional<AudioConfig> audio_config,
+ absl::optional<RemotePeerAudioConfig> remote_audio_config,
+ absl::optional<EchoEmulationConfig> echo_emulation_config,
+ TaskQueueFactory* task_queue_factory) {
+ std::unique_ptr<TestAudioDeviceModule::Renderer> renderer =
+ CreateAudioRenderer(remote_audio_config);
+ std::unique_ptr<TestAudioDeviceModule::Capturer> capturer =
+ CreateAudioCapturer(audio_config);
+ RTC_DCHECK(renderer);
+ RTC_DCHECK(capturer);
+
+ // Setup echo emulation if required.
+ if (echo_emulation_config) {
+ capturer = std::make_unique<EchoEmulatingCapturer>(std::move(capturer),
+ *echo_emulation_config);
+ renderer = std::make_unique<EchoEmulatingRenderer>(
+ std::move(renderer),
+ static_cast<EchoEmulatingCapturer*>(capturer.get()));
+ }
+
+ // Setup input stream dumping if required.
+ if (audio_config && audio_config->input_dump_file_name) {
+ capturer = std::make_unique<test::CopyToFileAudioCapturer>(
+ std::move(capturer), audio_config->input_dump_file_name.value());
+ }
+
+ return TestAudioDeviceModule::Create(task_queue_factory, std::move(capturer),
+ std::move(renderer), /*speed=*/1.f);
+}
+
+std::unique_ptr<cricket::MediaEngineInterface> CreateMediaEngine(
+ PeerConnectionFactoryComponents* pcf_dependencies,
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_module) {
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = pcf_dependencies->task_queue_factory.get();
+ media_deps.adm = audio_device_module;
+ media_deps.audio_processing = pcf_dependencies->audio_processing;
+ media_deps.audio_mixer = pcf_dependencies->audio_mixer;
+ media_deps.video_encoder_factory =
+ std::move(pcf_dependencies->video_encoder_factory);
+ media_deps.video_decoder_factory =
+ std::move(pcf_dependencies->video_decoder_factory);
+ webrtc::SetMediaEngineDefaults(&media_deps);
+ RTC_DCHECK(pcf_dependencies->trials);
+ media_deps.trials = pcf_dependencies->trials.get();
+
+ return cricket::CreateMediaEngine(std::move(media_deps));
+}
+
+void WrapVideoEncoderFactory(
+ absl::string_view peer_name,
+ double bitrate_multiplier,
+ EmulatedSFUConfigMap stream_to_sfu_config,
+ PeerConnectionFactoryComponents* pcf_dependencies,
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) {
+ std::unique_ptr<VideoEncoderFactory> video_encoder_factory;
+ if (pcf_dependencies->video_encoder_factory != nullptr) {
+ video_encoder_factory = std::move(pcf_dependencies->video_encoder_factory);
+ } else {
+ video_encoder_factory = CreateBuiltinVideoEncoderFactory();
+ }
+ pcf_dependencies->video_encoder_factory =
+ video_analyzer_helper->WrapVideoEncoderFactory(
+ peer_name, std::move(video_encoder_factory), bitrate_multiplier,
+ std::move(stream_to_sfu_config));
+}
+
+void WrapVideoDecoderFactory(
+ absl::string_view peer_name,
+ PeerConnectionFactoryComponents* pcf_dependencies,
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) {
+ std::unique_ptr<VideoDecoderFactory> video_decoder_factory;
+ if (pcf_dependencies->video_decoder_factory != nullptr) {
+ video_decoder_factory = std::move(pcf_dependencies->video_decoder_factory);
+ } else {
+ video_decoder_factory = CreateBuiltinVideoDecoderFactory();
+ }
+ pcf_dependencies->video_decoder_factory =
+ video_analyzer_helper->WrapVideoDecoderFactory(
+ peer_name, std::move(video_decoder_factory));
+}
+
+// Creates PeerConnectionFactoryDependencies objects, providing entities
+// from InjectableComponents::PeerConnectionFactoryComponents.
+PeerConnectionFactoryDependencies CreatePCFDependencies(
+ std::unique_ptr<PeerConnectionFactoryComponents> pcf_dependencies,
+ std::unique_ptr<cricket::MediaEngineInterface> media_engine,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread) {
+ PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.signaling_thread = signaling_thread;
+ pcf_deps.worker_thread = worker_thread;
+ pcf_deps.network_thread = network_thread;
+ pcf_deps.media_engine = std::move(media_engine);
+
+ pcf_deps.call_factory = std::move(pcf_dependencies->call_factory);
+ pcf_deps.event_log_factory = std::move(pcf_dependencies->event_log_factory);
+ pcf_deps.task_queue_factory = std::move(pcf_dependencies->task_queue_factory);
+
+ if (pcf_dependencies->fec_controller_factory != nullptr) {
+ pcf_deps.fec_controller_factory =
+ std::move(pcf_dependencies->fec_controller_factory);
+ }
+ if (pcf_dependencies->network_controller_factory != nullptr) {
+ pcf_deps.network_controller_factory =
+ std::move(pcf_dependencies->network_controller_factory);
+ }
+ if (pcf_dependencies->neteq_factory != nullptr) {
+ pcf_deps.neteq_factory = std::move(pcf_dependencies->neteq_factory);
+ }
+ if (pcf_dependencies->trials != nullptr) {
+ pcf_deps.trials = std::move(pcf_dependencies->trials);
+ }
+
+ return pcf_deps;
+}
+
+// Creates PeerConnectionDependencies objects, providing entities
+// from InjectableComponents::PeerConnectionComponents.
+PeerConnectionDependencies CreatePCDependencies(
+ MockPeerConnectionObserver* observer,
+ uint32_t port_allocator_extra_flags,
+ std::unique_ptr<PeerConnectionComponents> pc_dependencies) {
+ PeerConnectionDependencies pc_deps(observer);
+
+ auto port_allocator = std::make_unique<cricket::BasicPortAllocator>(
+ pc_dependencies->network_manager, pc_dependencies->packet_socket_factory);
+
+ // This test does not support TCP
+ int flags = port_allocator_extra_flags | cricket::PORTALLOCATOR_DISABLE_TCP;
+ port_allocator->set_flags(port_allocator->flags() | flags);
+
+ pc_deps.allocator = std::move(port_allocator);
+
+ if (pc_dependencies->async_resolver_factory != nullptr) {
+ pc_deps.async_resolver_factory =
+ std::move(pc_dependencies->async_resolver_factory);
+ }
+ if (pc_dependencies->cert_generator != nullptr) {
+ pc_deps.cert_generator = std::move(pc_dependencies->cert_generator);
+ }
+ if (pc_dependencies->tls_cert_verifier != nullptr) {
+ pc_deps.tls_cert_verifier = std::move(pc_dependencies->tls_cert_verifier);
+ }
+ if (pc_dependencies->ice_transport_factory != nullptr) {
+ pc_deps.ice_transport_factory =
+ std::move(pc_dependencies->ice_transport_factory);
+ }
+ return pc_deps;
+}
+
+} // namespace
+
+absl::optional<RemotePeerAudioConfig> RemotePeerAudioConfig::Create(
+ absl::optional<AudioConfig> config) {
+ if (!config) {
+ return absl::nullopt;
+ }
+ return RemotePeerAudioConfig(config.value());
+}
+
+std::unique_ptr<TestPeer> TestPeerFactory::CreateTestPeer(
+ std::unique_ptr<PeerConfigurer> configurer,
+ std::unique_ptr<MockPeerConnectionObserver> observer,
+ absl::optional<RemotePeerAudioConfig> remote_audio_config,
+ absl::optional<EchoEmulationConfig> echo_emulation_config) {
+ std::unique_ptr<InjectableComponents> components =
+ configurer->ReleaseComponents();
+ std::unique_ptr<Params> params = configurer->ReleaseParams();
+ std::unique_ptr<ConfigurableParams> configurable_params =
+ configurer->ReleaseConfigurableParams();
+ std::vector<PeerConfigurer::VideoSource> video_sources =
+ configurer->ReleaseVideoSources();
+ RTC_DCHECK(components);
+ RTC_DCHECK(params);
+ RTC_DCHECK(configurable_params);
+ RTC_DCHECK_EQ(configurable_params->video_configs.size(),
+ video_sources.size());
+ SetMandatoryEntities(components.get(), time_controller_);
+ params->rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan;
+
+ // Create peer connection factory.
+ if (components->pcf_dependencies->audio_processing == nullptr) {
+ components->pcf_dependencies->audio_processing =
+ webrtc::AudioProcessingBuilder().Create();
+ }
+ if (params->aec_dump_path) {
+ components->pcf_dependencies->audio_processing->CreateAndAttachAecDump(
+ *params->aec_dump_path, -1, task_queue_);
+ }
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_module =
+ CreateAudioDeviceModule(
+ params->audio_config, remote_audio_config, echo_emulation_config,
+ components->pcf_dependencies->task_queue_factory.get());
+ WrapVideoEncoderFactory(
+ params->name.value(), params->video_encoder_bitrate_multiplier,
+ CalculateRequiredSpatialIndexPerStream(
+ configurable_params->video_configs),
+ components->pcf_dependencies.get(), video_analyzer_helper_);
+ WrapVideoDecoderFactory(params->name.value(),
+ components->pcf_dependencies.get(),
+ video_analyzer_helper_);
+ std::unique_ptr<cricket::MediaEngineInterface> media_engine =
+ CreateMediaEngine(components->pcf_dependencies.get(),
+ audio_device_module);
+
+ std::unique_ptr<rtc::Thread> owned_worker_thread =
+ components->worker_thread != nullptr
+ ? nullptr
+ : time_controller_.CreateThread("worker_thread");
+ if (components->worker_thread == nullptr) {
+ components->worker_thread = owned_worker_thread.get();
+ }
+
+  // Store `webrtc::AudioProcessing` in a local variable before
+  // `components->pcf_dependencies` is moved below.
+ rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing =
+ components->pcf_dependencies->audio_processing;
+ PeerConnectionFactoryDependencies pcf_deps = CreatePCFDependencies(
+ std::move(components->pcf_dependencies), std::move(media_engine),
+ signaling_thread_, components->worker_thread, components->network_thread);
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> peer_connection_factory =
+ CreateModularPeerConnectionFactory(std::move(pcf_deps));
+
+ // Create peer connection.
+ PeerConnectionDependencies pc_deps =
+ CreatePCDependencies(observer.get(), params->port_allocator_extra_flags,
+ std::move(components->pc_dependencies));
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection =
+ peer_connection_factory
+ ->CreatePeerConnectionOrError(params->rtc_configuration,
+ std::move(pc_deps))
+ .MoveValue();
+ peer_connection->SetBitrate(params->bitrate_settings);
+
+ return absl::WrapUnique(
+ new TestPeer(peer_connection_factory, peer_connection,
+ std::move(observer), std::move(*params),
+ std::move(*configurable_params), std::move(video_sources),
+ audio_processing, std::move(owned_worker_thread)));
+}
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/pc/e2e/test_peer_factory.h b/third_party/libwebrtc/test/pc/e2e/test_peer_factory.h
new file mode 100644
index 0000000000..f2698e2a15
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/e2e/test_peer_factory.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_E2E_TEST_PEER_FACTORY_H_
+#define TEST_PC_E2E_TEST_PEER_FACTORY_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/test/pclf/media_configuration.h"
+#include "api/test/pclf/media_quality_test_params.h"
+#include "api/test/pclf/peer_configurer.h"
+#include "api/test/time_controller.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "rtc_base/task_queue.h"
+#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h"
+#include "test/pc/e2e/test_peer.h"
+
+namespace webrtc {
+namespace webrtc_pc_e2e {
+
+struct RemotePeerAudioConfig {
+ explicit RemotePeerAudioConfig(AudioConfig config)
+ : sampling_frequency_in_hz(config.sampling_frequency_in_hz),
+ output_file_name(config.output_dump_file_name) {}
+
+ static absl::optional<RemotePeerAudioConfig> Create(
+ absl::optional<AudioConfig> config);
+
+ int sampling_frequency_in_hz;
+ absl::optional<std::string> output_file_name;
+};
+
+class TestPeerFactory {
+ public:
+ // Creates a test peer factory.
+ // `signaling_thread` will be used as a signaling thread for all peers created
+ // by this factory.
+ // `time_controller` will be used to create required threads, task queue
+ // factories and call factory.
+ // `video_analyzer_helper` will be used to setup video quality analysis for
+ // created peers.
+ // `task_queue` will be used for AEC dump if it is requested.
+ TestPeerFactory(rtc::Thread* signaling_thread,
+ TimeController& time_controller,
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper,
+ rtc::TaskQueue* task_queue)
+ : signaling_thread_(signaling_thread),
+ time_controller_(time_controller),
+ video_analyzer_helper_(video_analyzer_helper),
+ task_queue_(task_queue) {}
+
+  // Sets up all components that should be provided to the WebRTC
+  // PeerConnectionFactory and PeerConnection creation methods, and
+  // also sets up the dependencies that are required for media analyzer
+  // injection.
+ std::unique_ptr<TestPeer> CreateTestPeer(
+ std::unique_ptr<PeerConfigurer> configurer,
+ std::unique_ptr<MockPeerConnectionObserver> observer,
+ absl::optional<RemotePeerAudioConfig> remote_audio_config,
+ absl::optional<EchoEmulationConfig> echo_emulation_config);
+
+ private:
+ rtc::Thread* signaling_thread_;
+ TimeController& time_controller_;
+ VideoQualityAnalyzerInjectionHelper* video_analyzer_helper_;
+ rtc::TaskQueue* task_queue_;
+};
+
+} // namespace webrtc_pc_e2e
+} // namespace webrtc
+
+#endif // TEST_PC_E2E_TEST_PEER_FACTORY_H_
diff --git a/third_party/libwebrtc/test/pc/sctp/BUILD.gn b/third_party/libwebrtc/test/pc/sctp/BUILD.gn
new file mode 100644
index 0000000000..f088a5b20c
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/sctp/BUILD.gn
@@ -0,0 +1,18 @@
+# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_source_set("fake_sctp_transport") {
+ visibility = [ "*" ]
+ sources = [ "fake_sctp_transport.h" ]
+ deps = [
+ "../../../api/transport:sctp_transport_factory_interface",
+ "../../../media:rtc_data_sctp_transport_internal",
+ ]
+}
diff --git a/third_party/libwebrtc/test/pc/sctp/fake_sctp_transport.h b/third_party/libwebrtc/test/pc/sctp/fake_sctp_transport.h
new file mode 100644
index 0000000000..a1bb0e219c
--- /dev/null
+++ b/third_party/libwebrtc/test/pc/sctp/fake_sctp_transport.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_
+#define TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_
+
+#include <memory>
+
+#include "api/transport/sctp_transport_factory_interface.h"
+#include "media/sctp/sctp_transport_internal.h"
+
+// Used for tests in this file to verify that PeerConnection responds to signals
+// from the SctpTransport correctly, and calls Start with the correct
+// local/remote ports.
+class FakeSctpTransport : public cricket::SctpTransportInternal {
+ public:
+ void SetOnConnectedCallback(std::function<void()> callback) override {}
+ void SetDataChannelSink(webrtc::DataChannelSink* sink) override {}
+ void SetDtlsTransport(rtc::PacketTransportInternal* transport) override {}
+ bool Start(int local_port, int remote_port, int max_message_size) override {
+ local_port_.emplace(local_port);
+ remote_port_.emplace(remote_port);
+ max_message_size_ = max_message_size;
+ return true;
+ }
+ bool OpenStream(int sid) override { return true; }
+ bool ResetStream(int sid) override { return true; }
+ bool SendData(int sid,
+ const webrtc::SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ cricket::SendDataResult* result = nullptr) override {
+ return true;
+ }
+ bool ReadyToSendData() override { return true; }
+ void set_debug_name_for_testing(const char* debug_name) override {}
+
+ int max_message_size() const { return max_message_size_; }
+ absl::optional<int> max_outbound_streams() const { return absl::nullopt; }
+ absl::optional<int> max_inbound_streams() const { return absl::nullopt; }
+ int local_port() const {
+ RTC_DCHECK(local_port_);
+ return *local_port_;
+ }
+ int remote_port() const {
+ RTC_DCHECK(remote_port_);
+ return *remote_port_;
+ }
+
+ private:
+ absl::optional<int> local_port_;
+ absl::optional<int> remote_port_;
+ int max_message_size_;
+};
+
+class FakeSctpTransportFactory : public webrtc::SctpTransportFactoryInterface {
+ public:
+ std::unique_ptr<cricket::SctpTransportInternal> CreateSctpTransport(
+ rtc::PacketTransportInternal*) override {
+ last_fake_sctp_transport_ = new FakeSctpTransport();
+ return std::unique_ptr<cricket::SctpTransportInternal>(
+ last_fake_sctp_transport_);
+ }
+
+ FakeSctpTransport* last_fake_sctp_transport() {
+ return last_fake_sctp_transport_;
+ }
+
+ private:
+ FakeSctpTransport* last_fake_sctp_transport_ = nullptr;
+};
+
+#endif // TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_
diff --git a/third_party/libwebrtc/test/peer_scenario/BUILD.gn b/third_party/libwebrtc/test/peer_scenario/BUILD.gn
new file mode 100644
index 0000000000..00492a18a9
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/BUILD.gn
@@ -0,0 +1,68 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+if (rtc_include_tests) {
+ rtc_library("peer_scenario") {
+ testonly = true
+ sources = [
+ "peer_scenario.cc",
+ "peer_scenario.h",
+ "peer_scenario_client.cc",
+ "peer_scenario_client.h",
+ "scenario_connection.cc",
+ "scenario_connection.h",
+ "signaling_route.cc",
+ "signaling_route.h",
+ ]
+ deps = [
+ "..:fake_video_codecs",
+ "..:fileutils",
+ "..:test_support",
+ "../:video_test_common",
+ "../../api:candidate",
+ "../../api:create_time_controller",
+ "../../api:libjingle_peerconnection_api",
+ "../../api:network_emulation_manager_api",
+ "../../api:rtc_stats_api",
+ "../../api:time_controller",
+ "../../api/audio_codecs:builtin_audio_decoder_factory",
+ "../../api/audio_codecs:builtin_audio_encoder_factory",
+ "../../api/rtc_event_log:rtc_event_log_factory",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/transport:field_trial_based_config",
+ "../../api/video_codecs:builtin_video_decoder_factory",
+ "../../api/video_codecs:builtin_video_encoder_factory",
+ "../../media:rtc_audio_video",
+ "../../media:rtc_media_base",
+ "../../media:rtp_utils",
+ "../../modules/audio_device:audio_device_impl",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../p2p:rtc_p2p",
+ "../../pc:channel",
+ "../../pc:jsep_transport_controller",
+ "../../pc:pc_test_utils",
+ "../../pc:rtp_transport_internal",
+ "../../pc:session_description",
+ "../../rtc_base:null_socket_server",
+ "../../rtc_base:stringutils",
+ "../../rtc_base:task_queue_for_test",
+ "../../test:explicit_key_value_config",
+ "../../test:scoped_key_value_config",
+ "../logging:log_writer",
+ "../network:emulated_network",
+ "../scenario",
+ "../time_controller",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/flags:flag",
+ "//third_party/abseil-cpp/absl/memory",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/test/peer_scenario/DEPS b/third_party/libwebrtc/test/peer_scenario/DEPS
new file mode 100644
index 0000000000..68e9f46087
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+pc",
+ "+p2p",
+]
+
diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc b/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc
new file mode 100644
index 0000000000..485e33f67f
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/peer_scenario/peer_scenario.h"
+
+#include "absl/flags/flag.h"
+#include "absl/memory/memory.h"
+#include "rtc_base/null_socket_server.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/strings/string_builder.h"
+#include "test/logging/file_log_writer.h"
+#include "test/testsupport/file_utils.h"
+#include "test/time_controller/real_time_controller.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+ABSL_FLAG(bool, peer_logs, false, "Save logs from peer scenario framework.");
+ABSL_FLAG(std::string,
+ peer_logs_root,
+ "",
+ "Output root path, based on project root if unset.");
+
+namespace webrtc {
+namespace test {
+namespace {
+std::unique_ptr<FileLogWriterFactory> GetPeerScenarioLogManager(
+ std::string file_name) {
+ if (absl::GetFlag(FLAGS_peer_logs) && !file_name.empty()) {
+ std::string output_root = absl::GetFlag(FLAGS_peer_logs_root);
+ if (output_root.empty())
+ output_root = OutputPath() + "output_data/";
+
+ auto base_filename = output_root + file_name + ".";
+ RTC_LOG(LS_INFO) << "Saving peer scenario logs to: " << base_filename;
+ return std::make_unique<FileLogWriterFactory>(base_filename);
+ }
+ return nullptr;
+}
+} // namespace
+
+PeerScenario::PeerScenario(const testing::TestInfo& test_info, TimeMode mode)
+ : PeerScenario(
+ std::string(test_info.test_suite_name()) + "/" + test_info.name(),
+ mode) {}
+
+PeerScenario::PeerScenario(std::string file_name, TimeMode mode)
+ : PeerScenario(GetPeerScenarioLogManager(file_name), mode) {}
+
+PeerScenario::PeerScenario(
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_manager,
+ TimeMode mode)
+ : log_writer_manager_(std::move(log_writer_manager)),
+ net_(mode, EmulatedNetworkStatsGatheringMode::kDefault),
+ signaling_thread_(net_.time_controller()->GetMainThread()) {}
+
+PeerScenarioClient* PeerScenario::CreateClient(
+ PeerScenarioClient::Config config) {
+ return CreateClient(
+ std::string("client_") + rtc::ToString(peer_clients_.size() + 1), config);
+}
+
+PeerScenarioClient* PeerScenario::CreateClient(
+ std::string name,
+ PeerScenarioClient::Config config) {
+ peer_clients_.emplace_back(net(), signaling_thread_,
+ GetLogWriterFactory(name), config);
+ return &peer_clients_.back();
+}
+
+SignalingRoute PeerScenario::ConnectSignaling(
+ PeerScenarioClient* caller,
+ PeerScenarioClient* callee,
+ std::vector<EmulatedNetworkNode*> send_link,
+ std::vector<EmulatedNetworkNode*> ret_link) {
+ return SignalingRoute(caller, callee, net_.CreateCrossTrafficRoute(send_link),
+ net_.CreateCrossTrafficRoute(ret_link));
+}
+
+void PeerScenario::SimpleConnection(
+ PeerScenarioClient* caller,
+ PeerScenarioClient* callee,
+ std::vector<EmulatedNetworkNode*> send_link,
+ std::vector<EmulatedNetworkNode*> ret_link) {
+ net()->CreateRoute(caller->endpoint(), send_link, callee->endpoint());
+ net()->CreateRoute(callee->endpoint(), ret_link, caller->endpoint());
+ auto signaling = ConnectSignaling(caller, callee, send_link, ret_link);
+ signaling.StartIceSignaling();
+ std::atomic<bool> done(false);
+ signaling.NegotiateSdp(
+ [&](const SessionDescriptionInterface&) { done = true; });
+ RTC_CHECK(WaitAndProcess(&done));
+}
+
+void PeerScenario::AttachVideoQualityAnalyzer(VideoQualityAnalyzer* analyzer,
+ VideoTrackInterface* send_track,
+ PeerScenarioClient* receiver) {
+ video_quality_pairs_.emplace_back(clock(), analyzer);
+ auto pair = &video_quality_pairs_.back();
+ send_track->AddOrUpdateSink(&pair->capture_tap_, rtc::VideoSinkWants());
+ receiver->AddVideoReceiveSink(send_track->id(), &pair->decode_tap_);
+}
+
+bool PeerScenario::WaitAndProcess(std::atomic<bool>* event,
+ TimeDelta max_duration) {
+ return net_.time_controller()->Wait([event] { return event->load(); },
+ max_duration);
+}
+
+void PeerScenario::ProcessMessages(TimeDelta duration) {
+ net_.time_controller()->AdvanceTime(duration);
+}
+
+std::unique_ptr<LogWriterFactoryInterface> PeerScenario::GetLogWriterFactory(
+ std::string name) {
+ if (!log_writer_manager_ || name.empty())
+ return nullptr;
+ return std::make_unique<LogWriterFactoryAddPrefix>(log_writer_manager_.get(),
+ name);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario.h b/third_party/libwebrtc/test/peer_scenario/peer_scenario.h
new file mode 100644
index 0000000000..a177eeaac6
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PEER_SCENARIO_PEER_SCENARIO_H_
+#define TEST_PEER_SCENARIO_PEER_SCENARIO_H_
+
+// The peer connection scenario test framework enables writing end to end unit
+// tests on the peer connection level. It's similar to the Scenario test but
+// uses the full stack, including SDP and ICE negotiation. This ensures that
+// features work end to end. It's also different from the other tests on peer
+// connection level in that it does not rely on any mocks or fakes other than
+// for media input and networking. Additionally it provides direct access to the
+// underlying peer connection class.
+
+#include <list>
+#include <vector>
+
+#include "api/test/time_controller.h"
+#include "test/gtest.h"
+#include "test/logging/log_writer.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/peer_scenario/peer_scenario_client.h"
+#include "test/peer_scenario/signaling_route.h"
+#include "test/scenario/stats_collection.h"
+#include "test/scenario/video_frame_matcher.h"
+
+namespace webrtc {
+namespace test {
+// The PeerScenario class represents a PeerConnection simulation scenario. The
+// main purpose is to maintain ownership and ensure safe destruction order of
+// clients and network emulation. Additionally it reduces the amount of boiler
+// plate required for some actions. For example usage see the existing tests
+// using this class. Note that it should be used from a single calling thread.
+// This thread will also be assigned as the signaling thread for all peer
+// connections that are created. This means that the process methods must be
+// used when waiting to ensure that messages are processed on the signaling
+// thread.
+class PeerScenario {
+ public:
+ // The name is used for log output when those are enabled by the --peer_logs
+ // command line flag. Optionally, the TestInfo struct available in gtest can
+ // be used to automatically generate a path based on the test name.
+ explicit PeerScenario(const testing::TestInfo& test_info,
+ TimeMode mode = TimeMode::kSimulated);
+ explicit PeerScenario(std::string file_name,
+ TimeMode mode = TimeMode::kSimulated);
+ explicit PeerScenario(
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_manager,
+ TimeMode mode = TimeMode::kSimulated);
+
+ NetworkEmulationManagerImpl* net() { return &net_; }
+
+ // Creates a client wrapping a peer connection conforming to the given config.
+ // The client will share the signaling thread with the scenario. To maintain
+ // control of destruction order, ownership is kept within the scenario.
+ PeerScenarioClient* CreateClient(PeerScenarioClient::Config config);
+ PeerScenarioClient* CreateClient(std::string name,
+ PeerScenarioClient::Config config);
+
+ // Sets up a signaling route that can be used for SDP and ICE.
+ SignalingRoute ConnectSignaling(PeerScenarioClient* caller,
+ PeerScenarioClient* callee,
+ std::vector<EmulatedNetworkNode*> send_link,
+ std::vector<EmulatedNetworkNode*> ret_link);
+
+ // Connects two clients over given links. This will also start ICE signaling
+ // and SDP negotiation with default behavior. For customized behavior,
+ // ConnectSignaling should be used to allow more detailed control, for
+ // instance to allow different signaling and media routes.
+ void SimpleConnection(PeerScenarioClient* caller,
+ PeerScenarioClient* callee,
+ std::vector<EmulatedNetworkNode*> send_link,
+ std::vector<EmulatedNetworkNode*> ret_link);
+
+ // Starts feeding the results of comparing captured frames from `send_track`
+ // with decoded frames on `receiver` to `analyzer`.
+ // TODO(srte): Provide a way to detach to allow removal of tracks.
+ void AttachVideoQualityAnalyzer(VideoQualityAnalyzer* analyzer,
+ VideoTrackInterface* send_track,
+ PeerScenarioClient* receiver);
+
+ // Waits on `event` while processing messages on the signaling thread.
+ bool WaitAndProcess(std::atomic<bool>* event,
+ TimeDelta max_duration = TimeDelta::Seconds(5));
+
+ // Process messages on the signaling thread for the given duration.
+ void ProcessMessages(TimeDelta duration);
+
+ private:
+ // Helper struct to maintain ownership of the matcher and taps.
+ struct PeerVideoQualityPair {
+ public:
+ PeerVideoQualityPair(Clock* capture_clock, VideoQualityAnalyzer* analyzer)
+ : matcher_({analyzer->Handler()}),
+ capture_tap_(capture_clock, &matcher_),
+ decode_tap_(capture_clock, &matcher_, 0) {}
+ VideoFrameMatcher matcher_;
+ CapturedFrameTap capture_tap_;
+ DecodedFrameTap decode_tap_;
+ };
+
+ Clock* clock() { return Clock::GetRealTimeClock(); }
+
+ std::unique_ptr<LogWriterFactoryInterface> GetLogWriterFactory(
+ std::string name);
+
+ const std::unique_ptr<LogWriterFactoryInterface> log_writer_manager_;
+ NetworkEmulationManagerImpl net_;
+ rtc::Thread* const signaling_thread_;
+ std::list<PeerVideoQualityPair> video_quality_pairs_;
+ std::list<PeerScenarioClient> peer_clients_;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // TEST_PEER_SCENARIO_PEER_SCENARIO_H_
diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc
new file mode 100644
index 0000000000..5d77f17561
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc
@@ -0,0 +1,428 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/peer_scenario/peer_scenario_client.h"
+
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/test/create_time_controller.h"
+#include "api/transport/field_trial_based_config.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "test/fake_decoder.h"
+#include "test/fake_vp8_encoder.h"
+#include "test/frame_generator_capturer.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+constexpr char kCommonStreamId[] = "stream_id";
+
+std::map<int, EmulatedEndpoint*> CreateEndpoints(
+ NetworkEmulationManager* net,
+ std::map<int, EmulatedEndpointConfig> endpoint_configs) {
+ std::map<int, EmulatedEndpoint*> endpoints;
+ for (const auto& kv : endpoint_configs)
+ endpoints[kv.first] = net->CreateEndpoint(kv.second);
+ return endpoints;
+}
+
+class LambdaPeerConnectionObserver final : public PeerConnectionObserver {
+ public:
+ explicit LambdaPeerConnectionObserver(
+ PeerScenarioClient::CallbackHandlers* handlers)
+ : handlers_(handlers) {}
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
+ for (const auto& handler : handlers_->on_signaling_change)
+ handler(new_state);
+ }
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> data_channel) override {
+ for (const auto& handler : handlers_->on_data_channel)
+ handler(data_channel);
+ }
+ void OnRenegotiationNeeded() override {
+ for (const auto& handler : handlers_->on_renegotiation_needed)
+ handler();
+ }
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {
+ for (const auto& handler : handlers_->on_standardized_ice_connection_change)
+ handler(new_state);
+ }
+ void OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) override {
+ for (const auto& handler : handlers_->on_connection_change)
+ handler(new_state);
+ }
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {
+ for (const auto& handler : handlers_->on_ice_gathering_change)
+ handler(new_state);
+ }
+ void OnIceCandidate(const IceCandidateInterface* candidate) override {
+ for (const auto& handler : handlers_->on_ice_candidate)
+ handler(candidate);
+ }
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) override {
+ for (const auto& handler : handlers_->on_ice_candidate_error)
+ handler(address, port, url, error_code, error_text);
+ }
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override {
+ for (const auto& handler : handlers_->on_ice_candidates_removed)
+ handler(candidates);
+ }
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface> >&
+ streams) override {
+ for (const auto& handler : handlers_->on_add_track)
+ handler(receiver, streams);
+ }
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override {
+ for (const auto& handler : handlers_->on_track)
+ handler(transceiver);
+ }
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override {
+ for (const auto& handler : handlers_->on_remove_track)
+ handler(receiver);
+ }
+
+ private:
+ PeerScenarioClient::CallbackHandlers* handlers_;
+};
+
+class LambdaCreateSessionDescriptionObserver
+ : public CreateSessionDescriptionObserver {
+ public:
+ explicit LambdaCreateSessionDescriptionObserver(
+ std::function<void(std::unique_ptr<SessionDescriptionInterface> desc)>
+ on_success)
+ : on_success_(on_success) {}
+ void OnSuccess(SessionDescriptionInterface* desc) override {
+ // Takes ownership of answer, according to CreateSessionDescriptionObserver
+ // convention.
+ on_success_(absl::WrapUnique(desc));
+ }
+ void OnFailure(RTCError error) override {
+ RTC_DCHECK_NOTREACHED() << error.message();
+ }
+
+ private:
+ std::function<void(std::unique_ptr<SessionDescriptionInterface> desc)>
+ on_success_;
+};
+
+class LambdaSetLocalDescriptionObserver
+ : public SetLocalDescriptionObserverInterface {
+ public:
+ explicit LambdaSetLocalDescriptionObserver(
+ std::function<void(RTCError)> on_complete)
+ : on_complete_(on_complete) {}
+ void OnSetLocalDescriptionComplete(RTCError error) override {
+ on_complete_(error);
+ }
+
+ private:
+ std::function<void(RTCError)> on_complete_;
+};
+
+class LambdaSetRemoteDescriptionObserver
+ : public SetRemoteDescriptionObserverInterface {
+ public:
+ explicit LambdaSetRemoteDescriptionObserver(
+ std::function<void(RTCError)> on_complete)
+ : on_complete_(on_complete) {}
+ void OnSetRemoteDescriptionComplete(RTCError error) override {
+ on_complete_(error);
+ }
+
+ private:
+ std::function<void(RTCError)> on_complete_;
+};
+
+class FakeVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ FakeVideoEncoderFactory(Clock* clock) : clock_(clock) {}
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ return {SdpVideoFormat("VP8")};
+ }
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override {
+ RTC_CHECK_EQ(format.name, "VP8");
+ return std::make_unique<FakeVp8Encoder>(clock_);
+ }
+
+ private:
+ Clock* const clock_;
+};
+class FakeVideoDecoderFactory : public VideoDecoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ return {SdpVideoFormat("VP8")};
+ }
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override {
+ return std::make_unique<FakeDecoder>();
+ }
+};
+} // namespace
+
+PeerScenarioClient::PeerScenarioClient(
+ NetworkEmulationManager* net,
+ rtc::Thread* signaling_thread,
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ PeerScenarioClient::Config config)
+ : endpoints_(CreateEndpoints(net, config.endpoints)),
+ task_queue_factory_(net->time_controller()->GetTaskQueueFactory()),
+ signaling_thread_(signaling_thread),
+ log_writer_factory_(std::move(log_writer_factory)),
+ worker_thread_(net->time_controller()->CreateThread("worker")),
+ handlers_(config.handlers),
+ observer_(new LambdaPeerConnectionObserver(&handlers_)) {
+ handlers_.on_track.push_back(
+ [this](rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ auto track = transceiver->receiver()->track().get();
+ if (track->kind() == MediaStreamTrackInterface::kVideoKind) {
+ auto* video = static_cast<VideoTrackInterface*>(track);
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ for (auto* sink : track_id_to_video_sinks_[track->id()]) {
+ video->AddOrUpdateSink(sink, rtc::VideoSinkWants());
+ }
+ }
+ });
+ handlers_.on_signaling_change.push_back(
+ [this](PeerConnectionInterface::SignalingState state) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (state == PeerConnectionInterface::SignalingState::kStable &&
+ peer_connection_->current_remote_description()) {
+ for (const auto& candidate : pending_ice_candidates_) {
+ RTC_CHECK(peer_connection_->AddIceCandidate(candidate.get()));
+ }
+ pending_ice_candidates_.clear();
+ }
+ });
+
+ std::vector<EmulatedEndpoint*> endpoints_vector;
+ for (const auto& kv : endpoints_)
+ endpoints_vector.push_back(kv.second);
+ auto* manager = net->CreateEmulatedNetworkManagerInterface(endpoints_vector);
+
+ PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.network_thread = manager->network_thread();
+ pcf_deps.signaling_thread = signaling_thread_;
+ pcf_deps.worker_thread = worker_thread_.get();
+ pcf_deps.call_factory =
+ CreateTimeControllerBasedCallFactory(net->time_controller());
+ pcf_deps.task_queue_factory =
+ net->time_controller()->CreateTaskQueueFactory();
+ pcf_deps.event_log_factory =
+ std::make_unique<RtcEventLogFactory>(task_queue_factory_);
+ pcf_deps.trials = std::make_unique<FieldTrialBasedConfig>();
+
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = task_queue_factory_;
+ media_deps.adm = TestAudioDeviceModule::Create(
+ task_queue_factory_,
+ TestAudioDeviceModule::CreatePulsedNoiseCapturer(
+ config.audio.pulsed_noise->amplitude *
+ std::numeric_limits<int16_t>::max(),
+ config.audio.sample_rate, config.audio.channels),
+ TestAudioDeviceModule::CreateDiscardRenderer(config.audio.sample_rate));
+
+ media_deps.audio_processing = AudioProcessingBuilder().Create();
+ if (config.video.use_fake_codecs) {
+ media_deps.video_encoder_factory =
+ std::make_unique<FakeVideoEncoderFactory>(
+ net->time_controller()->GetClock());
+ media_deps.video_decoder_factory =
+ std::make_unique<FakeVideoDecoderFactory>();
+ } else {
+ media_deps.video_encoder_factory = CreateBuiltinVideoEncoderFactory();
+ media_deps.video_decoder_factory = CreateBuiltinVideoDecoderFactory();
+ }
+ media_deps.audio_encoder_factory = CreateBuiltinAudioEncoderFactory();
+ media_deps.audio_decoder_factory = CreateBuiltinAudioDecoderFactory();
+ media_deps.trials = pcf_deps.trials.get();
+
+ pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ pcf_deps.fec_controller_factory = nullptr;
+ pcf_deps.network_controller_factory = nullptr;
+ pcf_deps.network_state_predictor_factory = nullptr;
+
+ pc_factory_ = CreateModularPeerConnectionFactory(std::move(pcf_deps));
+ PeerConnectionFactoryInterface::Options pc_options;
+ pc_options.disable_encryption = config.disable_encryption;
+ pc_factory_->SetOptions(pc_options);
+
+ PeerConnectionDependencies pc_deps(observer_.get());
+ pc_deps.allocator = std::make_unique<cricket::BasicPortAllocator>(
+ manager->network_manager(), manager->packet_socket_factory());
+ pc_deps.allocator->set_flags(pc_deps.allocator->flags() |
+ cricket::PORTALLOCATOR_DISABLE_TCP);
+ peer_connection_ =
+ pc_factory_
+ ->CreatePeerConnectionOrError(config.rtc_config, std::move(pc_deps))
+ .MoveValue();
+ if (log_writer_factory_) {
+ peer_connection_->StartRtcEventLog(log_writer_factory_->Create(".rtc.dat"),
+ /*output_period_ms=*/1000);
+ }
+}
+
+EmulatedEndpoint* PeerScenarioClient::endpoint(int index) {
+ RTC_CHECK_GT(endpoints_.size(), index);
+ return endpoints_.at(index);
+}
+
+PeerScenarioClient::AudioSendTrack PeerScenarioClient::CreateAudio(
+ std::string track_id,
+ cricket::AudioOptions options) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ AudioSendTrack res;
+ auto source = pc_factory_->CreateAudioSource(options);
+ auto track = pc_factory_->CreateAudioTrack(track_id, source.get());
+ res.track = track;
+ res.sender = peer_connection_->AddTrack(track, {kCommonStreamId}).value();
+ return res;
+}
+
+PeerScenarioClient::VideoSendTrack PeerScenarioClient::CreateVideo(
+ std::string track_id,
+ VideoSendTrackConfig config) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ VideoSendTrack res;
+ auto capturer = FrameGeneratorCapturer::Create(clock(), *task_queue_factory_,
+ config.generator);
+ res.capturer = capturer.get();
+ capturer->Init();
+ res.source = rtc::make_ref_counted<FrameGeneratorCapturerVideoTrackSource>(
+ std::move(capturer), config.screencast);
+ auto track = pc_factory_->CreateVideoTrack(track_id, res.source.get());
+ res.track = track.get();
+ res.sender =
+ peer_connection_->AddTrack(track, {kCommonStreamId}).MoveValue().get();
+ return res;
+}
+
+void PeerScenarioClient::AddVideoReceiveSink(
+ std::string track_id,
+ rtc::VideoSinkInterface<VideoFrame>* video_sink) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ track_id_to_video_sinks_[track_id].push_back(video_sink);
+}
+
+void PeerScenarioClient::CreateAndSetSdp(
+ std::function<void(SessionDescriptionInterface*)> munge_offer,
+ std::function<void(std::string)> offer_handler) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ peer_connection_->CreateOffer(
+ rtc::make_ref_counted<LambdaCreateSessionDescriptionObserver>(
+ [=](std::unique_ptr<SessionDescriptionInterface> offer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (munge_offer) {
+ munge_offer(offer.get());
+ }
+ std::string sdp_offer;
+ RTC_CHECK(offer->ToString(&sdp_offer));
+ peer_connection_->SetLocalDescription(
+ std::move(offer),
+ rtc::make_ref_counted<LambdaSetLocalDescriptionObserver>(
+ [sdp_offer, offer_handler](RTCError) {
+ offer_handler(sdp_offer);
+ }));
+ })
+ .get(),
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+}
+
+void PeerScenarioClient::SetSdpOfferAndGetAnswer(
+ std::string remote_offer,
+ std::function<void(std::string)> answer_handler) {
+ if (!signaling_thread_->IsCurrent()) {
+ signaling_thread_->PostTask(
+ [=] { SetSdpOfferAndGetAnswer(remote_offer, answer_handler); });
+ return;
+ }
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ peer_connection_->SetRemoteDescription(
+ CreateSessionDescription(SdpType::kOffer, remote_offer),
+ rtc::make_ref_counted<LambdaSetRemoteDescriptionObserver>([=](RTCError) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ peer_connection_->CreateAnswer(
+ rtc::make_ref_counted<LambdaCreateSessionDescriptionObserver>(
+ [=](std::unique_ptr<SessionDescriptionInterface> answer) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ std::string sdp_answer;
+ answer->ToString(&sdp_answer);
+ RTC_LOG(LS_INFO) << sdp_answer;
+ peer_connection_->SetLocalDescription(
+ std::move(answer),
+ rtc::make_ref_counted<LambdaSetLocalDescriptionObserver>(
+ [answer_handler, sdp_answer](RTCError) {
+ answer_handler(sdp_answer);
+ }));
+ })
+ .get(),
+ PeerConnectionInterface::RTCOfferAnswerOptions());
+ }));
+}
+
+void PeerScenarioClient::SetSdpAnswer(
+ std::string remote_answer,
+ std::function<void(const SessionDescriptionInterface&)> done_handler) {
+ if (!signaling_thread_->IsCurrent()) {
+ signaling_thread_->PostTask(
+ [=] { SetSdpAnswer(remote_answer, done_handler); });
+ return;
+ }
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ peer_connection_->SetRemoteDescription(
+ CreateSessionDescription(SdpType::kAnswer, remote_answer),
+ rtc::make_ref_counted<LambdaSetRemoteDescriptionObserver>(
+ [remote_answer, done_handler](RTCError) {
+ auto answer =
+ CreateSessionDescription(SdpType::kAnswer, remote_answer);
+ done_handler(*answer);
+ }));
+}
+
+void PeerScenarioClient::AddIceCandidate(
+ std::unique_ptr<IceCandidateInterface> candidate) {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ if (peer_connection_->signaling_state() ==
+ PeerConnectionInterface::SignalingState::kStable &&
+ peer_connection_->current_remote_description()) {
+ RTC_CHECK(peer_connection_->AddIceCandidate(candidate.get()));
+ } else {
+ pending_ice_candidates_.push_back(std::move(candidate));
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.h b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.h
new file mode 100644
index 0000000000..ab6aac9cf8
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PEER_SCENARIO_PEER_SCENARIO_CLIENT_H_
+#define TEST_PEER_SCENARIO_PEER_SCENARIO_CLIENT_H_
+
+#include <functional>
+#include <list>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "api/peer_connection_interface.h"
+#include "api/test/network_emulation_manager.h"
+#include "api/test/time_controller.h"
+#include "pc/test/frame_generator_capturer_video_track_source.h"
+#include "test/logging/log_writer.h"
+
+namespace webrtc {
+namespace test {
+
+// Wrapper for a PeerConnection for use in PeerScenario tests. It's intended to
+// be a minimal wrapper for a peer connection that's simple to use in testing.
+// In particular the constructor hides a lot of the required setup for a peer
+// connection.
+class PeerScenarioClient {
+ public:
+ struct CallbackHandlers {
+ std::vector<std::function<void(PeerConnectionInterface::SignalingState)>>
+ on_signaling_change;
+ std::vector<std::function<void(rtc::scoped_refptr<DataChannelInterface>)>>
+ on_data_channel;
+ std::vector<std::function<void()>> on_renegotiation_needed;
+ std::vector<
+ std::function<void(PeerConnectionInterface::IceConnectionState)>>
+ on_standardized_ice_connection_change;
+ std::vector<
+ std::function<void(PeerConnectionInterface::PeerConnectionState)>>
+ on_connection_change;
+ std::vector<std::function<void(PeerConnectionInterface::IceGatheringState)>>
+ on_ice_gathering_change;
+ std::vector<std::function<void(const IceCandidateInterface*)>>
+ on_ice_candidate;
+ std::vector<std::function<void(const std::string&,
+ int,
+ const std::string&,
+ int,
+ const std::string&)>>
+ on_ice_candidate_error;
+ std::vector<std::function<void(const std::vector<cricket::Candidate>&)>>
+ on_ice_candidates_removed;
+ std::vector<std::function<void(
+ rtc::scoped_refptr<RtpReceiverInterface>,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&)>>
+ on_add_track;
+ std::vector<
+ std::function<void(rtc::scoped_refptr<RtpTransceiverInterface>)>>
+ on_track;
+ std::vector<std::function<void(rtc::scoped_refptr<RtpReceiverInterface>)>>
+ on_remove_track;
+ };
+ struct Config {
+ // WebRTC only supports one audio device that is set up on construction, so
+ // we provide the audio generator configuration here rather than on creation
+ // of the tracks. This is unlike video, where multiple capture sources can
+ // be used at the same time.
+ struct AudioSource {
+ int sample_rate = 48000;
+ int channels = 1;
+ struct PulsedNoise {
+ double amplitude = 0.1;
+ };
+ absl::optional<PulsedNoise> pulsed_noise = PulsedNoise();
+ } audio;
+ struct Video {
+ bool use_fake_codecs = false;
+ } video;
+ // The created endpoints can be accessed using the map key as `index` in
+ // PeerScenarioClient::endpoint(index).
+ std::map<int, EmulatedEndpointConfig> endpoints = {
+ {0, EmulatedEndpointConfig()}};
+ CallbackHandlers handlers;
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ bool disable_encryption = false;
+ Config() { rtc_config.sdp_semantics = SdpSemantics::kUnifiedPlan; }
+ };
+
+ struct VideoSendTrackConfig {
+ FrameGeneratorCapturerConfig generator;
+ bool screencast = false;
+ };
+
+ struct AudioSendTrack {
+ rtc::scoped_refptr<AudioTrackInterface> track;
+ rtc::scoped_refptr<RtpSenderInterface> sender;
+ };
+
+ struct VideoSendTrack {
+ // Raw pointer to the capturer owned by `source`.
+ FrameGeneratorCapturer* capturer;
+ rtc::scoped_refptr<FrameGeneratorCapturerVideoTrackSource> source;
+ rtc::scoped_refptr<VideoTrackInterface> track;
+ rtc::scoped_refptr<RtpSenderInterface> sender;
+ };
+
+ PeerScenarioClient(
+ NetworkEmulationManager* net,
+ rtc::Thread* signaling_thread,
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ Config config);
+
+ PeerConnectionFactoryInterface* factory() { return pc_factory_.get(); }
+ PeerConnectionInterface* pc() {
+ RTC_DCHECK_RUN_ON(signaling_thread_);
+ return peer_connection_.get();
+ }
+ rtc::Thread* thread() { return signaling_thread_; }
+ Clock* clock() { return Clock::GetRealTimeClock(); }
+
+ // Returns the endpoint created from the EmulatedEndpointConfig with the same
+ // index in PeerScenarioClient::config.
+ EmulatedEndpoint* endpoint(int index = 0);
+
+ AudioSendTrack CreateAudio(std::string track_id,
+ cricket::AudioOptions options);
+ VideoSendTrack CreateVideo(std::string track_id, VideoSendTrackConfig config);
+
+ void AddVideoReceiveSink(std::string track_id,
+ rtc::VideoSinkInterface<VideoFrame>* video_sink);
+
+ CallbackHandlers* handlers() { return &handlers_; }
+
+ // The `munge_offer` function can be used to munge the SDP, i.e. modify a
+ // local description after creating it but before setting it. Note that this is
+ // legacy behavior. It's added here only to be able to have test coverage for
+ // scenarios even if they are not spec compliant.
+ void CreateAndSetSdp(
+ std::function<void(SessionDescriptionInterface*)> munge_offer,
+ std::function<void(std::string)> offer_handler);
+ void SetSdpOfferAndGetAnswer(std::string remote_offer,
+ std::function<void(std::string)> answer_handler);
+ void SetSdpAnswer(
+ std::string remote_answer,
+ std::function<void(const SessionDescriptionInterface& answer)>
+ done_handler);
+
+ // Adds the given ice candidate when the peer connection is ready.
+ void AddIceCandidate(std::unique_ptr<IceCandidateInterface> candidate);
+
+ private:
+ const std::map<int, EmulatedEndpoint*> endpoints_;
+ TaskQueueFactory* const task_queue_factory_;
+ rtc::Thread* const signaling_thread_;
+ const std::unique_ptr<LogWriterFactoryInterface> log_writer_factory_;
+ const std::unique_ptr<rtc::Thread> worker_thread_;
+ CallbackHandlers handlers_ RTC_GUARDED_BY(signaling_thread_);
+ const std::unique_ptr<PeerConnectionObserver> observer_;
+ std::map<std::string, std::vector<rtc::VideoSinkInterface<VideoFrame>*>>
+ track_id_to_video_sinks_ RTC_GUARDED_BY(signaling_thread_);
+ std::list<std::unique_ptr<IceCandidateInterface>> pending_ice_candidates_
+ RTC_GUARDED_BY(signaling_thread_);
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> pc_factory_;
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection_
+ RTC_GUARDED_BY(signaling_thread_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_PEER_SCENARIO_PEER_SCENARIO_CLIENT_H_
diff --git a/third_party/libwebrtc/test/peer_scenario/scenario_connection.cc b/third_party/libwebrtc/test/peer_scenario/scenario_connection.cc
new file mode 100644
index 0000000000..66eca275d1
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/scenario_connection.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/peer_scenario/scenario_connection.h"
+
+#include "absl/memory/memory.h"
+#include "media/base/rtp_utils.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "p2p/client/basic_port_allocator.h"
+#include "pc/channel.h"
+#include "pc/jsep_transport_controller.h"
+#include "pc/rtp_transport_internal.h"
+#include "pc/session_description.h"
+#include "rtc_base/task_queue_for_test.h"
+
+namespace webrtc {
+// Implementation of ScenarioIceConnection built directly on top of a
+// JsepTransportController (no PeerConnection). The transport-controller and
+// RTP-demuxer callback interfaces are inherited privately so callbacks arrive
+// on `this` without exposing them to users of the class.
+class ScenarioIceConnectionImpl : public ScenarioIceConnection,
+                                  public sigslot::has_slots<>,
+                                  private JsepTransportController::Observer,
+                                  private RtpPacketSinkInterface {
+ public:
+  ScenarioIceConnectionImpl(test::NetworkEmulationManagerImpl* net,
+                            IceConnectionObserver* observer);
+  ~ScenarioIceConnectionImpl() override;
+
+  // Copy the packet and post the actual send to the network thread.
+  void SendRtpPacket(rtc::ArrayView<const uint8_t> packet_view) override;
+  void SendRtcpPacket(rtc::ArrayView<const uint8_t> packet_view) override;
+
+  // Called on the signaling thread.
+  void SetRemoteSdp(SdpType type, const std::string& remote_sdp) override;
+  void SetLocalSdp(SdpType type, const std::string& local_sdp) override;
+
+  EmulatedEndpoint* endpoint() override { return endpoint_; }
+  const cricket::TransportDescription& transport_description() const override {
+    return transport_description_;
+  }
+
+ private:
+  JsepTransportController::Config CreateJsepConfig();
+  // JsepTransportController::Observer implementation; invoked on the network
+  // thread when the negotiated transport for `mid` changes.
+  bool OnTransportChanged(
+      const std::string& mid,
+      RtpTransportInternal* rtp_transport,
+      rtc::scoped_refptr<DtlsTransport> dtls_transport,
+      DataChannelTransportInterface* data_channel_transport) override;
+
+  // RtpPacketSinkInterface implementation; receives demuxed RTP packets on
+  // the network thread and forwards them to `observer_`.
+  void OnRtpPacket(const RtpPacketReceived& packet) override;
+  // Forwards gathered ICE candidates to `observer_` on the signaling thread.
+  void OnCandidates(const std::string& mid,
+                    const std::vector<cricket::Candidate>& candidates);
+
+  IceConnectionObserver* const observer_;
+  EmulatedEndpoint* const endpoint_;
+  EmulatedNetworkManagerInterface* const manager_;
+  rtc::Thread* const signaling_thread_;
+  rtc::Thread* const network_thread_;
+  rtc::scoped_refptr<rtc::RTCCertificate> const certificate_
+      RTC_GUARDED_BY(network_thread_);
+  cricket::TransportDescription const transport_description_
+      RTC_GUARDED_BY(signaling_thread_);
+  std::unique_ptr<cricket::BasicPortAllocator> port_allocator_
+      RTC_GUARDED_BY(network_thread_);
+  std::unique_ptr<JsepTransportController> jsep_controller_;
+  // Set by OnTransportChanged on the network thread; null until a transport
+  // has been negotiated.
+  RtpTransportInternal* rtp_transport_ RTC_GUARDED_BY(network_thread_) =
+      nullptr;
+  std::unique_ptr<SessionDescriptionInterface> remote_description_
+      RTC_GUARDED_BY(signaling_thread_);
+  std::unique_ptr<SessionDescriptionInterface> local_description_
+      RTC_GUARDED_BY(signaling_thread_);
+};
+
+std::unique_ptr<ScenarioIceConnection> ScenarioIceConnection::Create(
+    webrtc::test::NetworkEmulationManagerImpl* net,
+    IceConnectionObserver* observer) {
+  // Hand back the concrete implementation behind the abstract interface.
+  std::unique_ptr<ScenarioIceConnection> connection =
+      std::make_unique<ScenarioIceConnectionImpl>(net, observer);
+  return connection;
+}
+
+ScenarioIceConnectionImpl::ScenarioIceConnectionImpl(
+    test::NetworkEmulationManagerImpl* net,
+    IceConnectionObserver* observer)
+    // NOTE: members are initialized in declaration order; `certificate_` must
+    // come before `transport_description_`, whose fingerprint is derived from
+    // it. The constructor runs on what becomes the signaling thread.
+    : observer_(observer),
+      endpoint_(net->CreateEndpoint(EmulatedEndpointConfig())),
+      manager_(net->CreateEmulatedNetworkManagerInterface({endpoint_})),
+      signaling_thread_(rtc::Thread::Current()),
+      network_thread_(manager_->network_thread()),
+      certificate_(rtc::RTCCertificate::Create(
+          rtc::SSLIdentity::Create("", ::rtc::KT_DEFAULT))),
+      transport_description_(
+          /*transport_options*/ {},
+          rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH),
+          rtc::CreateRandomString(cricket::ICE_PWD_LENGTH),
+          cricket::IceMode::ICEMODE_FULL,
+          cricket::ConnectionRole::CONNECTIONROLE_PASSIVE,
+          rtc::SSLFingerprint::CreateFromCertificate(*certificate_.get())
+              .get()),
+      port_allocator_(
+          new cricket::BasicPortAllocator(manager_->network_manager(),
+                                          manager_->packet_socket_factory())),
+      jsep_controller_(
+          new JsepTransportController(network_thread_,
+                                      port_allocator_.get(),
+                                      /*async_resolver_factory*/ nullptr,
+                                      CreateJsepConfig())) {
+  // Finish setup of the network-thread-owned objects on the network thread,
+  // waiting for completion before the constructor returns.
+  SendTask(network_thread_, [this] {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    // TCP candidates are not needed for these emulated-network scenarios.
+    uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
+    port_allocator_->set_flags(port_allocator_->flags() | flags);
+    port_allocator_->Initialize();
+    RTC_CHECK(port_allocator_->SetConfiguration(/*stun_servers*/ {},
+                                                /*turn_servers*/ {}, 0,
+                                                webrtc::NO_PRUNE));
+    jsep_controller_->SetLocalCertificate(certificate_);
+  });
+}
+
+ScenarioIceConnectionImpl::~ScenarioIceConnectionImpl() {
+  // Tear down network-thread-owned state on the network thread and wait for
+  // it to finish before the destructor returns.
+  SendTask(network_thread_, [this] {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    jsep_controller_.reset();
+    port_allocator_.reset();
+    // Raw pointer into the (now destroyed) controller's transport; cleared,
+    // not deleted.
+    rtp_transport_ = nullptr;
+  });
+}
+
+// Builds the JsepTransportController configuration used by the constructor.
+JsepTransportController::Config ScenarioIceConnectionImpl::CreateJsepConfig() {
+  JsepTransportController::Config config;
+  config.transport_observer = this;
+  // Max-bundle: a single transport carries all media sections.
+  config.bundle_policy =
+      PeerConnectionInterface::BundlePolicy::kBundlePolicyMaxBundle;
+  // Forward incoming RTCP to the observer; invoked on the network thread.
+  config.rtcp_handler = [this](const rtc::CopyOnWriteBuffer& packet,
+                               int64_t packet_time_us) {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    observer_->OnPacketReceived(packet);
+  };
+  // `field_trials` is the public member declared on ScenarioIceConnection.
+  config.field_trials = &field_trials;
+  return config;
+}
+
+void ScenarioIceConnectionImpl::SendRtpPacket(
+    rtc::ArrayView<const uint8_t> packet_view) {
+  // Copy the payload into a ref-counted buffer sized for RTP, then hop to
+  // the network thread where the transport may be touched.
+  rtc::CopyOnWriteBuffer buffer(packet_view.data(), packet_view.size(),
+                                ::cricket::kMaxRtpPacketLen);
+  network_thread_->PostTask([this, buffer = std::move(buffer)]() mutable {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    if (rtp_transport_ == nullptr)
+      return;  // No transport negotiated yet; drop the packet.
+    rtp_transport_->SendRtpPacket(&buffer, rtc::PacketOptions(),
+                                  cricket::PF_SRTP_BYPASS);
+  });
+}
+
+void ScenarioIceConnectionImpl::SendRtcpPacket(
+    rtc::ArrayView<const uint8_t> packet_view) {
+  // Copy into a ref-counted buffer and defer the send to the network thread.
+  rtc::CopyOnWriteBuffer buffer(packet_view.data(), packet_view.size(),
+                                ::cricket::kMaxRtpPacketLen);
+  network_thread_->PostTask([this, buffer = std::move(buffer)]() mutable {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    if (rtp_transport_ == nullptr)
+      return;  // No transport negotiated yet; drop the packet.
+    rtp_transport_->SendRtcpPacket(&buffer, rtc::PacketOptions(),
+                                   cricket::PF_SRTP_BYPASS);
+  });
+}
+void ScenarioIceConnectionImpl::SetRemoteSdp(SdpType type,
+                                             const std::string& remote_sdp) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  remote_description_ = webrtc::CreateSessionDescription(type, remote_sdp);
+  // Forward gathered ICE candidates to OnCandidates. NOTE(review): this
+  // subscribes on every call; if SetRemoteSdp were invoked more than once the
+  // callback would be registered multiple times -- confirm that is intended.
+  jsep_controller_->SubscribeIceCandidateGathered(
+      [this](const std::string& transport,
+             const std::vector<cricket::Candidate>& candidate) {
+        ScenarioIceConnectionImpl::OnCandidates(transport, candidate);
+      });
+
+  auto res = jsep_controller_->SetRemoteDescription(
+      remote_description_->GetType(), remote_description_->description());
+  RTC_CHECK(res.ok()) << res.message();
+  // Collect every audio and video payload type from the remote description so
+  // the demuxer routes packets with those payload types to this sink.
+  RtpDemuxerCriteria criteria;
+  for (const auto& content : remote_description_->description()->contents()) {
+    if (content.media_description()->as_audio()) {
+      for (const auto& codec :
+           content.media_description()->as_audio()->codecs()) {
+        criteria.payload_types().insert(codec.id);
+      }
+    }
+    if (content.media_description()->as_video()) {
+      for (const auto& codec :
+           content.media_description()->as_video()->codecs()) {
+        criteria.payload_types().insert(codec.id);
+      }
+    }
+  }
+
+  // Sink registration must happen on the network thread, and requires that
+  // OnTransportChanged has already provided a transport.
+  network_thread_->PostTask([this, criteria]() {
+    RTC_DCHECK_RUN_ON(network_thread_);
+    RTC_DCHECK(rtp_transport_);
+    rtp_transport_->RegisterRtpDemuxerSink(criteria, this);
+  });
+}
+
+void ScenarioIceConnectionImpl::SetLocalSdp(SdpType type,
+                                            const std::string& local_sdp) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  local_description_ = webrtc::CreateSessionDescription(type, local_sdp);
+  auto res = jsep_controller_->SetLocalDescription(
+      local_description_->GetType(), local_description_->description());
+  RTC_CHECK(res.ok()) << res.message();
+  // Kick off ICE candidate gathering now that a local description is set.
+  jsep_controller_->MaybeStartGathering();
+}
+
+bool ScenarioIceConnectionImpl::OnTransportChanged(
+    const std::string& mid,
+    RtpTransportInternal* rtp_transport,
+    rtc::scoped_refptr<DtlsTransport> dtls_transport,
+    DataChannelTransportInterface* data_channel_transport) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  if (rtp_transport == nullptr) {
+    // Transport going away: stop receiving demuxed packets for it.
+    rtp_transport_->UnregisterRtpDemuxerSink(this);
+  } else {
+    // With max-bundle we expect a single transport, so only the first (or the
+    // same) transport should ever be reported here.
+    RTC_DCHECK(rtp_transport_ == nullptr || rtp_transport_ == rtp_transport);
+    if (rtp_transport_ != rtp_transport) {
+      rtp_transport_ = rtp_transport;
+    }
+    // Also receive packets demuxed by MID for this m= section.
+    RtpDemuxerCriteria criteria(mid);
+    rtp_transport_->RegisterRtpDemuxerSink(criteria, this);
+  }
+  return true;
+}
+
+// Demuxed RTP packets are handed to the observer as raw buffers.
+void ScenarioIceConnectionImpl::OnRtpPacket(const RtpPacketReceived& packet) {
+  RTC_DCHECK_RUN_ON(network_thread_);
+  observer_->OnPacketReceived(packet.Buffer());
+}
+
+// Forwards locally gathered ICE candidates to the observer.
+void ScenarioIceConnectionImpl::OnCandidates(
+    const std::string& mid,
+    const std::vector<cricket::Candidate>& candidates) {
+  RTC_DCHECK_RUN_ON(signaling_thread_);
+  observer_->OnIceCandidates(mid, candidates);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/scenario_connection.h b/third_party/libwebrtc/test/peer_scenario/scenario_connection.h
new file mode 100644
index 0000000000..e8cef527c5
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/scenario_connection.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PEER_SCENARIO_SCENARIO_CONNECTION_H_
+#define TEST_PEER_SCENARIO_SCENARIO_CONNECTION_H_
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/candidate.h"
+#include "api/jsep.h"
+#include "p2p/base/transport_description.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+
+// ScenarioIceConnection provides the transport level functionality of a
+// PeerConnection for use in peer connection scenario tests. This allows
+// implementing custom server side behavior in tests.
+class ScenarioIceConnection {
+ public:
+  class IceConnectionObserver {
+   public:
+    // Called on network thread.
+    virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet) = 0;
+    // Called on signaling thread.
+    virtual void OnIceCandidates(
+        const std::string& mid,
+        const std::vector<cricket::Candidate>& candidates) = 0;
+
+   protected:
+    // Observers are not owned and are never deleted through this interface.
+    ~IceConnectionObserver() = default;
+  };
+  // Creates the default implementation. `net` and `observer` are borrowed and
+  // must outlive the returned connection.
+  static std::unique_ptr<ScenarioIceConnection> Create(
+      test::NetworkEmulationManagerImpl* net,
+      IceConnectionObserver* observer);
+
+  virtual ~ScenarioIceConnection() = default;
+
+  // Posts tasks to send packets to network thread.
+  virtual void SendRtpPacket(rtc::ArrayView<const uint8_t> packet_view) = 0;
+  virtual void SendRtcpPacket(rtc::ArrayView<const uint8_t> packet_view) = 0;
+
+  // Used for ICE configuration, called on signaling thread.
+  virtual void SetRemoteSdp(SdpType type, const std::string& remote_sdp) = 0;
+  virtual void SetLocalSdp(SdpType type, const std::string& local_sdp) = 0;
+
+  virtual EmulatedEndpoint* endpoint() = 0;
+  virtual const cricket::TransportDescription& transport_description()
+      const = 0;
+
+  // Key/value field trial configuration used by implementations; public so
+  // tests can adjust it before the connection is configured.
+  webrtc::test::ScopedKeyValueConfig field_trials;
+};
+
+} // namespace webrtc
+
+#endif // TEST_PEER_SCENARIO_SCENARIO_CONNECTION_H_
diff --git a/third_party/libwebrtc/test/peer_scenario/signaling_route.cc b/third_party/libwebrtc/test/peer_scenario/signaling_route.cc
new file mode 100644
index 0000000000..eeec7c8657
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/signaling_route.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/peer_scenario/signaling_route.h"
+
+#include <memory>
+
+#include "test/network/network_emulation_manager.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr size_t kIcePacketSize = 400;
+constexpr size_t kSdpPacketSize = 1200;
+
+// Copyable, serialized representation of an ICE candidate, used to emulate
+// sending candidates over the fake signaling channel.
+struct IceMessage {
+  IceMessage() = default;
+  explicit IceMessage(const IceCandidateInterface* candidate)
+      : sdp_mid(candidate->sdp_mid()),
+        sdp_mline_index(candidate->sdp_mline_index()) {
+    RTC_CHECK(candidate->ToString(&sdp_line));
+  }
+  // Parses the stored SDP line back into a candidate object. Crashes on
+  // malformed input, which is acceptable in test-only code.
+  std::unique_ptr<IceCandidateInterface> AsCandidate() const {
+    SdpParseError err;
+    std::unique_ptr<IceCandidateInterface> candidate(
+        CreateIceCandidate(sdp_mid, sdp_mline_index, sdp_line, &err));
+    RTC_CHECK(candidate) << "Failed to parse: \"" << err.line
+                         << "\". Reason: " << err.description;
+    return candidate;
+  }
+  std::string sdp_mid;
+  int sdp_mline_index;
+  std::string sdp_line;
+};
+
+// Registers a handler on `caller` that forwards each gathered ICE candidate
+// to `callee` over `send_route`, delayed as if it were a kIcePacketSize-byte
+// signaling packet.
+void StartIceSignalingForRoute(PeerScenarioClient* caller,
+                               PeerScenarioClient* callee,
+                               CrossTrafficRoute* send_route) {
+  caller->handlers()->on_ice_candidate.push_back(
+      [=](const IceCandidateInterface* candidate) {
+        // Serialize the candidate into a copyable message so it can be
+        // delivered through the asynchronous hops below.
+        IceMessage msg(candidate);
+        send_route->NetworkDelayedAction(kIcePacketSize, [callee, msg]() {
+          callee->thread()->PostTask(
+              [callee, msg]() { callee->AddIceCandidate(msg.AsCandidate()); });
+        });
+      });
+}
+
+// Performs a full SDP offer/answer exchange between `caller` and `callee`.
+// `munge_offer` (may be null) edits the offer before it is set as the local
+// description; `modify_offer` (may be null) edits the offer that is sent to
+// the callee, after the local description was set. Each SDP message is
+// delayed by the transfer time of a kSdpPacketSize-byte packet on its route.
+// `exchange_finished` is invoked with the answer after the caller applied it.
+void StartSdpNegotiation(
+    PeerScenarioClient* caller,
+    PeerScenarioClient* callee,
+    CrossTrafficRoute* send_route,
+    CrossTrafficRoute* ret_route,
+    std::function<void(SessionDescriptionInterface* offer)> munge_offer,
+    std::function<void(SessionDescriptionInterface*)> modify_offer,
+    std::function<void(const SessionDescriptionInterface&)> exchange_finished) {
+  caller->CreateAndSetSdp(munge_offer, [=](std::string sdp_offer) {
+    if (modify_offer) {
+      // Re-parse the serialized offer, let the hook edit it, then serialize
+      // it again before it goes "on the wire".
+      auto offer = CreateSessionDescription(SdpType::kOffer, sdp_offer);
+      modify_offer(offer.get());
+      RTC_CHECK(offer->ToString(&sdp_offer));
+    }
+    send_route->NetworkDelayedAction(kSdpPacketSize, [=] {
+      callee->SetSdpOfferAndGetAnswer(sdp_offer, [=](std::string answer) {
+        ret_route->NetworkDelayedAction(kSdpPacketSize, [=] {
+          caller->SetSdpAnswer(std::move(answer), std::move(exchange_finished));
+        });
+      });
+    });
+  });
+}
+} // namespace
+
+// All pointers are borrowed and must outlive the SignalingRoute.
+SignalingRoute::SignalingRoute(PeerScenarioClient* caller,
+                               PeerScenarioClient* callee,
+                               CrossTrafficRoute* send_route,
+                               CrossTrafficRoute* ret_route)
+    : caller_(caller),
+      callee_(callee),
+      send_route_(send_route),
+      ret_route_(ret_route) {}
+
+void SignalingRoute::StartIceSignaling() {
+  // Forward candidates in both directions, each over its own route.
+  StartIceSignalingForRoute(caller_, callee_, send_route_);
+  StartIceSignalingForRoute(callee_, caller_, ret_route_);
+}
+
+// Full-control variant; see the header for the munge/modify distinction.
+void SignalingRoute::NegotiateSdp(
+    std::function<void(SessionDescriptionInterface*)> munge_offer,
+    std::function<void(SessionDescriptionInterface*)> modify_offer,
+    std::function<void(const SessionDescriptionInterface&)> exchange_finished) {
+  StartSdpNegotiation(caller_, callee_, send_route_, ret_route_, munge_offer,
+                      modify_offer, exchange_finished);
+}
+
+void SignalingRoute::NegotiateSdp(
+    std::function<void(SessionDescriptionInterface*)> modify_offer,
+    std::function<void(const SessionDescriptionInterface&)> exchange_finished) {
+  // No munging requested; forward to the full variant with an empty callback.
+  NegotiateSdp(nullptr, std::move(modify_offer), std::move(exchange_finished));
+}
+
+void SignalingRoute::NegotiateSdp(
+    std::function<void(const SessionDescriptionInterface&)> exchange_finished) {
+  // Neither munging nor modification requested; pass empty callbacks.
+  NegotiateSdp(nullptr, nullptr, std::move(exchange_finished));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/signaling_route.h b/third_party/libwebrtc/test/peer_scenario/signaling_route.h
new file mode 100644
index 0000000000..a95ae5c9f7
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/signaling_route.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PEER_SCENARIO_SIGNALING_ROUTE_H_
+#define TEST_PEER_SCENARIO_SIGNALING_ROUTE_H_
+
+#include <string>
+#include <utility>
+
+#include "test/network/network_emulation_manager.h"
+#include "test/peer_scenario/peer_scenario_client.h"
+
+namespace webrtc {
+namespace test {
+
+// Helper class to reduce the amount of boilerplate required for ICE signaling
+// and SDP negotiation.
+class SignalingRoute {
+ public:
+  // All pointers are borrowed and must outlive the SignalingRoute.
+  SignalingRoute(PeerScenarioClient* caller,
+                 PeerScenarioClient* callee,
+                 CrossTrafficRoute* send_route,
+                 CrossTrafficRoute* ret_route);
+
+  // Starts forwarding ICE candidates in both directions.
+  void StartIceSignaling();
+
+  // The `modify_offer` callback is used to modify an offer after the local
+  // description has been set. This is legal (but odd) behavior.
+  // The `munge_offer` callback is used to modify an offer between its creation
+  // and set local description. This behavior is forbidden according to the spec
+  // but available here in order to allow test coverage on corner cases.
+  // The `exchange_finished` callback is called with the answer produced after
+  // SDP negotiation has completed.
+  // TODO(srte): Handle lossy links.
+  void NegotiateSdp(
+      std::function<void(SessionDescriptionInterface* offer)> munge_offer,
+      std::function<void(SessionDescriptionInterface* offer)> modify_offer,
+      std::function<void(const SessionDescriptionInterface& answer)>
+          exchange_finished);
+  void NegotiateSdp(
+      std::function<void(SessionDescriptionInterface* offer)> modify_offer,
+      std::function<void(const SessionDescriptionInterface& answer)>
+          exchange_finished);
+  void NegotiateSdp(
+      std::function<void(const SessionDescriptionInterface& answer)>
+          exchange_finished);
+  // Returns a route with caller/callee (and their directions) swapped.
+  SignalingRoute reverse() {
+    return SignalingRoute(callee_, caller_, ret_route_, send_route_);
+  }
+
+ private:
+  PeerScenarioClient* const caller_;
+  PeerScenarioClient* const callee_;
+  CrossTrafficRoute* const send_route_;
+  CrossTrafficRoute* const ret_route_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_PEER_SCENARIO_SIGNALING_ROUTE_H_
diff --git a/third_party/libwebrtc/test/peer_scenario/tests/BUILD.gn b/third_party/libwebrtc/test/peer_scenario/tests/BUILD.gn
new file mode 100644
index 0000000000..ba6ec20e84
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/tests/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+if (rtc_include_tests) {
+  # End-to-end peer connection scenario tests; only built when the WebRTC
+  # test targets are enabled.
+  rtc_library("tests") {
+    testonly = true
+    sources = [
+      "peer_scenario_quality_test.cc",
+      "remote_estimate_test.cc",
+      "unsignaled_stream_test.cc",
+    ]
+    deps = [
+      "..:peer_scenario",
+      "../../:field_trial",
+      "../../:test_support",
+      "../../../media:rtc_media_base",
+      "../../../media:stream_params",
+      "../../../modules/rtp_rtcp:rtp_rtcp_format",
+      "../../../pc:media_session",
+      "../../../pc:session_description",
+    ]
+  }
+}
diff --git a/third_party/libwebrtc/test/peer_scenario/tests/peer_scenario_quality_test.cc b/third_party/libwebrtc/test/peer_scenario/tests/peer_scenario_quality_test.cc
new file mode 100644
index 0000000000..911a68720f
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/tests/peer_scenario_quality_test.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/gtest.h"
+#include "test/peer_scenario/peer_scenario.h"
+#include "test/peer_scenario/peer_scenario_client.h"
+
+namespace webrtc {
+namespace test {
+#if defined(WEBRTC_WIN)
+// Disabled on Windows via GoogleTest's DISABLED_ prefix convention;
+// NOTE(review): the reason for disabling is not documented here.
+#define MAYBE_PsnrIsCollected DISABLED_PsnrIsCollected
+#else
+#define MAYBE_PsnrIsCollected PsnrIsCollected
+#endif
+TEST(PeerScenarioQualityTest, MAYBE_PsnrIsCollected) {
+  VideoQualityAnalyzer analyzer;
+  {
+    PeerScenario s(*test_info_);
+    auto caller = s.CreateClient(PeerScenarioClient::Config());
+    auto callee = s.CreateClient(PeerScenarioClient::Config());
+    PeerScenarioClient::VideoSendTrackConfig video_conf;
+    video_conf.generator.squares_video->framerate = 20;
+    auto video = caller->CreateVideo("VIDEO", video_conf);
+    // 100 ms delay and 600 kbps capacity per direction.
+    auto link_builder = s.net()->NodeBuilder().delay_ms(100).capacity_kbps(600);
+    s.AttachVideoQualityAnalyzer(&analyzer, video.track.get(), callee);
+    s.SimpleConnection(caller, callee, {link_builder.Build().node},
+                       {link_builder.Build().node});
+    s.ProcessMessages(TimeDelta::Seconds(2));
+    // Exit scope to ensure that there's no pending tasks reporting to analyzer.
+  }
+
+  // We expect ca 40 frames to be produced, but to avoid flakiness on slow
+  // machines we only test for 10.
+  EXPECT_GT(analyzer.stats().render.count, 10);
+  EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 20);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/tests/remote_estimate_test.cc b/third_party/libwebrtc/test/peer_scenario/tests/remote_estimate_test.cc
new file mode 100644
index 0000000000..2dfbfdd3c9
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/tests/remote_estimate_test.cc
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "modules/rtp_rtcp/source/rtp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "pc/media_session.h"
+#include "pc/session_description.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/peer_scenario/peer_scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+// Extracts the RTP header extension map of the first audio m= section.
+RtpHeaderExtensionMap AudioExtensions(
+    const SessionDescriptionInterface& session) {
+  return RtpHeaderExtensionMap(
+      cricket::GetFirstAudioContentDescription(session.description())
+          ->rtp_header_extensions());
+}
+
+} // namespace
+
+// Verifies that when the caller advertises the remote-estimate capability in
+// its offer, the callee echoes the capability back in the answer.
+TEST(RemoteEstimateEndToEnd, OfferedCapabilityIsInAnswer) {
+  PeerScenario s(*test_info_);
+
+  auto* caller = s.CreateClient(PeerScenarioClient::Config());
+  auto* callee = s.CreateClient(PeerScenarioClient::Config());
+
+  // Note: these are std::initializer_lists each holding a single node.
+  auto send_link = {s.net()->NodeBuilder().Build().node};
+  auto ret_link = {s.net()->NodeBuilder().Build().node};
+
+  s.net()->CreateRoute(caller->endpoint(), send_link, callee->endpoint());
+  s.net()->CreateRoute(callee->endpoint(), ret_link, caller->endpoint());
+
+  auto signaling = s.ConnectSignaling(caller, callee, send_link, ret_link);
+  caller->CreateVideo("VIDEO", PeerScenarioClient::VideoSendTrackConfig());
+  std::atomic<bool> offer_exchange_done(false);
+  signaling.NegotiateSdp(
+      // Modify the offer sent to the callee to advertise the remote-estimate
+      // capability on every m= section.
+      [](SessionDescriptionInterface* offer) {
+        for (auto& cont : offer->description()->contents()) {
+          cont.media_description()->set_remote_estimate(true);
+        }
+      },
+      // The answer must echo the capability back for every m= section.
+      [&](const SessionDescriptionInterface& answer) {
+        for (auto& cont : answer.description()->contents()) {
+          EXPECT_TRUE(cont.media_description()->remote_estimate());
+        }
+        offer_exchange_done = true;
+      });
+  RTC_CHECK(s.WaitAndProcess(&offer_exchange_done));
+}
+
+// Verifies that an audio-only call negotiates the abs-send-time header
+// extension and that it is actually present on RTP packets on the wire.
+TEST(RemoteEstimateEndToEnd, AudioUsesAbsSendTimeExtension) {
+  // Defined before PeerScenario so it gets destructed after, to avoid use after free.
+  std::atomic<bool> received_abs_send_time(false);
+  PeerScenario s(*test_info_);
+
+  auto* caller = s.CreateClient(PeerScenarioClient::Config());
+  auto* callee = s.CreateClient(PeerScenarioClient::Config());
+
+  auto send_node = s.net()->NodeBuilder().Build().node;
+  auto ret_node = s.net()->NodeBuilder().Build().node;
+
+  s.net()->CreateRoute(caller->endpoint(), {send_node}, callee->endpoint());
+  s.net()->CreateRoute(callee->endpoint(), {ret_node}, caller->endpoint());
+
+  auto signaling = s.ConnectSignaling(caller, callee, {send_node}, {ret_node});
+  caller->CreateAudio("AUDIO", cricket::AudioOptions());
+  signaling.StartIceSignaling();
+  RtpHeaderExtensionMap extension_map;
+  std::atomic<bool> offer_exchange_done(false);
+  signaling.NegotiateSdp(
+      // Capture the negotiated extension map; it is needed later to parse
+      // header extensions out of packets seen on the wire.
+      [&extension_map](SessionDescriptionInterface* offer) {
+        extension_map = AudioExtensions(*offer);
+        EXPECT_TRUE(extension_map.IsRegistered(kRtpExtensionAbsoluteSendTime));
+      },
+      [&](const SessionDescriptionInterface& answer) {
+        EXPECT_TRUE(AudioExtensions(answer).IsRegistered(
+            kRtpExtensionAbsoluteSendTime));
+        offer_exchange_done = true;
+      });
+  RTC_CHECK(s.WaitAndProcess(&offer_exchange_done));
+  send_node->router()->SetWatcher(
+      [extension_map, &received_abs_send_time](const EmulatedIpPacket& packet) {
+        // The dummy packets used by the fake signaling are filled with 0. We
+        // want to ignore those and we can do that on the basis that the first
+        // byte of RTP packets are guaranteed to not be 0.
+        RtpPacket rtp_packet(&extension_map);
+        // TODO(bugs.webrtc.org/14525): Look why there are RTP packets with
+        // payload 72 or 73 (these don't have the RTP AbsoluteSendTime
+        // Extension).
+        // NOTE(review): 111 is presumably the audio codec's dynamic payload
+        // type -- confirm against the negotiated SDP.
+        if (rtp_packet.Parse(packet.data) && rtp_packet.PayloadType() == 111) {
+          EXPECT_TRUE(rtp_packet.HasExtension<AbsoluteSendTime>());
+          received_abs_send_time = true;
+        }
+      });
+  RTC_CHECK(s.WaitAndProcess(&received_abs_send_time));
+  // Stop both peers so traffic quiesces before scenario teardown.
+  caller->pc()->Close();
+  callee->pc()->Close();
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/peer_scenario/tests/unsignaled_stream_test.cc b/third_party/libwebrtc/test/peer_scenario/tests/unsignaled_stream_test.cc
new file mode 100644
index 0000000000..4f478b4b2a
--- /dev/null
+++ b/third_party/libwebrtc/test/peer_scenario/tests/unsignaled_stream_test.cc
@@ -0,0 +1,270 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/stream_params.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "pc/media_session.h"
+#include "pc/session_description.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/peer_scenario/peer_scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+enum class MidTestConfiguration {
+ // Legacy endpoint setup where PT demuxing is used.
+ kMidNotNegotiated,
+ // MID is negotiated but missing from packets. PT demuxing is disabled, so
+ // SSRCs have to be added to the SDP for WebRTC to forward packets correctly.
+ // Happens when client is spec compliant but the SFU isn't. Popular legacy.
+ kMidNegotiatedButMissingFromPackets,
+ // Fully spec-compliant: MID is present so we can safely drop packets with
+ // unknown MIDs.
+ kMidNegotiatedAndPresentInPackets,
+};
+
+// Gives the parameterized test a readable suffix.
+std::string TestParametersMidTestConfigurationToString(
+ testing::TestParamInfo<MidTestConfiguration> info) {
+ switch (info.param) {
+ case MidTestConfiguration::kMidNotNegotiated:
+ return "MidNotNegotiated";
+ case MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+ return "MidNegotiatedButMissingFromPackets";
+ case MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+ return "MidNegotiatedAndPresentInPackets";
+ }
+}
+
+class FrameObserver : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ FrameObserver() : frame_observed_(false) {}
+ void OnFrame(const VideoFrame&) override { frame_observed_ = true; }
+
+ std::atomic<bool> frame_observed_;
+};
+
+uint32_t get_ssrc(SessionDescriptionInterface* offer, size_t track_index) {
+ EXPECT_LT(track_index, offer->description()->contents().size());
+ return offer->description()
+ ->contents()[track_index]
+ .media_description()
+ ->streams()[0]
+ .ssrcs[0];
+}
+
+void set_ssrc(SessionDescriptionInterface* offer, size_t index, uint32_t ssrc) {
+ EXPECT_LT(index, offer->description()->contents().size());
+ cricket::StreamParams& new_stream_params = offer->description()
+ ->contents()[index]
+ .media_description()
+ ->mutable_streams()[0];
+ new_stream_params.ssrcs[0] = ssrc;
+ new_stream_params.ssrc_groups[0].ssrcs[0] = ssrc;
+}
+
+} // namespace
+
+class UnsignaledStreamTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<MidTestConfiguration> {};
+
+TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) {
+ // This test covers a scenario that might occur if a remote client starts
+ // sending media packets before negotiation has completed. Depending on setup,
+ // these packets either get dropped or trigger an unsignalled default stream
+ // to be created, and connects that to a default video sink.
+ // In some edge cases using Unified Plan and PT demuxing, the default stream
+  // is created in a different transceiver to where the media SSRC will actually
+ // be used. This test verifies that the default stream is removed properly,
+ // and that packets are demuxed and video frames reach the desired sink.
+ const MidTestConfiguration kMidTestConfiguration = GetParam();
+
+ // Defined before PeerScenario so it gets destructed after, to avoid use after
+ // free.
+ PeerScenario s(*::testing::UnitTest::GetInstance()->current_test_info());
+
+ PeerScenarioClient::Config config = PeerScenarioClient::Config();
+ // Disable encryption so that we can inject a fake early media packet without
+ // triggering srtp failures.
+ config.disable_encryption = true;
+ auto* caller = s.CreateClient(config);
+ auto* callee = s.CreateClient(config);
+
+ auto send_node = s.net()->NodeBuilder().Build().node;
+ auto ret_node = s.net()->NodeBuilder().Build().node;
+
+ s.net()->CreateRoute(caller->endpoint(), {send_node}, callee->endpoint());
+ s.net()->CreateRoute(callee->endpoint(), {ret_node}, caller->endpoint());
+
+ auto signaling = s.ConnectSignaling(caller, callee, {send_node}, {ret_node});
+ PeerScenarioClient::VideoSendTrackConfig video_conf;
+ video_conf.generator.squares_video->framerate = 15;
+
+ auto first_track = caller->CreateVideo("VIDEO", video_conf);
+ FrameObserver first_sink;
+ callee->AddVideoReceiveSink(first_track.track->id(), &first_sink);
+
+ signaling.StartIceSignaling();
+ std::atomic<bool> offer_exchange_done(false);
+ std::atomic<bool> got_unsignaled_packet(false);
+
+ // We will capture the media ssrc of the first added stream, and preemptively
+ // inject a new media packet using a different ssrc. What happens depends on
+ // the test configuration.
+ //
+ // MidTestConfiguration::kMidNotNegotiated:
+ // - MID is not negotiated which means PT-based demuxing is enabled. Because
+ // the packets have no MID, the second ssrc packet gets forwarded to the
+ // first m= section. This will create a "default stream" for the second ssrc
+ // and connect it to the default video sink (not set in this test). The test
+ // verifies we can recover from this when we later get packets for the first
+ // ssrc.
+ //
+ // MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+  // - MID is negotiated which means PT-based demuxing is disabled. Because we
+ // modify the packets not to contain the MID anyway (simulating a legacy SFU
+ // that does not negotiate properly) unknown SSRCs are dropped but do not
+ // otherwise cause any issues.
+ //
+ // MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+  // - MID is negotiated which means PT-based demuxing is disabled. In this case
+ // the packets have the MID so they either get forwarded or dropped
+ // depending on if the MID is known. The spec-compliant way is also the most
+ // straight-forward one.
+
+ uint32_t first_ssrc = 0;
+ uint32_t second_ssrc = 0;
+ absl::optional<int> mid_header_extension_id = absl::nullopt;
+
+ signaling.NegotiateSdp(
+ /* munge_sdp = */
+ [&](SessionDescriptionInterface* offer) {
+ // Obtain the MID header extension ID and if we want the
+ // MidTestConfiguration::kMidNotNegotiated setup then we remove the MID
+ // header extension through SDP munging (otherwise SDP is not modified).
+ for (cricket::ContentInfo& content_info :
+ offer->description()->contents()) {
+ std::vector<RtpExtension> header_extensions =
+ content_info.media_description()->rtp_header_extensions();
+ for (auto it = header_extensions.begin();
+ it != header_extensions.end(); ++it) {
+ if (it->uri == RtpExtension::kMidUri) {
+ // MID header extension found!
+ mid_header_extension_id = it->id;
+ if (kMidTestConfiguration ==
+ MidTestConfiguration::kMidNotNegotiated) {
+ // Munge away the extension.
+ header_extensions.erase(it);
+ }
+ break;
+ }
+ }
+ content_info.media_description()->set_rtp_header_extensions(
+ std::move(header_extensions));
+ }
+ ASSERT_TRUE(mid_header_extension_id.has_value());
+ },
+ /* modify_sdp = */
+ [&](SessionDescriptionInterface* offer) {
+ first_ssrc = get_ssrc(offer, 0);
+ second_ssrc = first_ssrc + 1;
+
+ send_node->router()->SetWatcher([&](const EmulatedIpPacket& packet) {
+ if (IsRtpPacket(packet.data) &&
+ ByteReader<uint32_t>::ReadBigEndian(&(packet.cdata()[8])) ==
+ first_ssrc &&
+ !got_unsignaled_packet) {
+ // Parse packet and modify the SSRC to simulate a second m=
+ // section that has not been negotiated yet.
+ std::vector<RtpExtension> extensions;
+ extensions.emplace_back(RtpExtension::kMidUri,
+ mid_header_extension_id.value());
+ RtpHeaderExtensionMap extensions_map(extensions);
+ RtpPacket parsed_packet;
+ parsed_packet.IdentifyExtensions(extensions_map);
+ ASSERT_TRUE(parsed_packet.Parse(packet.data));
+ parsed_packet.SetSsrc(second_ssrc);
+ // The MID extension is present if and only if it was negotiated.
+ // If present, we either want to remove it or modify it depending
+ // on setup.
+ switch (kMidTestConfiguration) {
+ case MidTestConfiguration::kMidNotNegotiated:
+ EXPECT_FALSE(parsed_packet.HasExtension<RtpMid>());
+ break;
+ case MidTestConfiguration::kMidNegotiatedButMissingFromPackets:
+ EXPECT_TRUE(parsed_packet.HasExtension<RtpMid>());
+ ASSERT_TRUE(parsed_packet.RemoveExtension(RtpMid::kId));
+ break;
+ case MidTestConfiguration::kMidNegotiatedAndPresentInPackets:
+ EXPECT_TRUE(parsed_packet.HasExtension<RtpMid>());
+ // The simulated second m= section would have a different MID.
+ // If we don't modify it here then `second_ssrc` would end up
+ // being mapped to the first m= section which would cause SSRC
+ // conflicts if we later add the same SSRC to a second m=
+ // section. Hidden assumption: first m= section does not use
+ // MID:1.
+ ASSERT_TRUE(parsed_packet.SetExtension<RtpMid>("1"));
+ break;
+ }
+ // Inject the modified packet.
+ rtc::CopyOnWriteBuffer updated_buffer = parsed_packet.Buffer();
+ EmulatedIpPacket updated_packet(
+ packet.from, packet.to, updated_buffer, packet.arrival_time);
+ send_node->OnPacketReceived(std::move(updated_packet));
+ got_unsignaled_packet = true;
+ }
+ });
+ },
+ [&](const SessionDescriptionInterface& answer) {
+ EXPECT_EQ(answer.description()->contents().size(), 1u);
+ offer_exchange_done = true;
+ });
+ EXPECT_TRUE(s.WaitAndProcess(&offer_exchange_done));
+ EXPECT_TRUE(s.WaitAndProcess(&got_unsignaled_packet));
+ EXPECT_TRUE(s.WaitAndProcess(&first_sink.frame_observed_));
+
+ auto second_track = caller->CreateVideo("VIDEO2", video_conf);
+ FrameObserver second_sink;
+ callee->AddVideoReceiveSink(second_track.track->id(), &second_sink);
+
+ // Create a second video stream, munge the sdp to force it to use our fake
+ // early media ssrc.
+ offer_exchange_done = false;
+ signaling.NegotiateSdp(
+ /* munge_sdp = */
+ [&](SessionDescriptionInterface* offer) {
+ set_ssrc(offer, 1, second_ssrc);
+ },
+ /* modify_sdp = */ {},
+ [&](const SessionDescriptionInterface& answer) {
+ EXPECT_EQ(answer.description()->contents().size(), 2u);
+ offer_exchange_done = true;
+ });
+ EXPECT_TRUE(s.WaitAndProcess(&offer_exchange_done));
+ EXPECT_TRUE(s.WaitAndProcess(&second_sink.frame_observed_));
+ caller->pc()->Close();
+ callee->pc()->Close();
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ UnsignaledStreamTest,
+ ::testing::Values(MidTestConfiguration::kMidNotNegotiated,
+ MidTestConfiguration::kMidNegotiatedButMissingFromPackets,
+ MidTestConfiguration::kMidNegotiatedAndPresentInPackets),
+ TestParametersMidTestConfigurationToString);
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/platform_video_capturer.cc b/third_party/libwebrtc/test/platform_video_capturer.cc
new file mode 100644
index 0000000000..fb3392a052
--- /dev/null
+++ b/third_party/libwebrtc/test/platform_video_capturer.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/platform_video_capturer.h"
+
+#include "absl/memory/memory.h"
+#if defined(WEBRTC_MAC)
+#include "test/mac_capturer.h"
+#else
+#include "test/vcm_capturer.h"
+#endif
+
+namespace webrtc {
+namespace test {
+
+std::unique_ptr<TestVideoCapturer> CreateVideoCapturer(
+ size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index) {
+#if defined(WEBRTC_MAC)
+ return absl::WrapUnique<TestVideoCapturer>(test::MacCapturer::Create(
+ width, height, target_fps, capture_device_index));
+#else
+ return absl::WrapUnique<TestVideoCapturer>(test::VcmCapturer::Create(
+ width, height, target_fps, capture_device_index));
+#endif
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/platform_video_capturer.h b/third_party/libwebrtc/test/platform_video_capturer.h
new file mode 100644
index 0000000000..241ba87df8
--- /dev/null
+++ b/third_party/libwebrtc/test/platform_video_capturer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_PLATFORM_VIDEO_CAPTURER_H_
+#define TEST_PLATFORM_VIDEO_CAPTURER_H_
+
+#include <memory>
+
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace test {
+
+std::unique_ptr<TestVideoCapturer> CreateVideoCapturer(
+ size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_PLATFORM_VIDEO_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/rtcp_packet_parser.cc b/third_party/libwebrtc/test/rtcp_packet_parser.cc
new file mode 100644
index 0000000000..e286ec5a36
--- /dev/null
+++ b/third_party/libwebrtc/test/rtcp_packet_parser.cc
@@ -0,0 +1,112 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/rtcp_packet_parser.h"
+
+#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace test {
+
+RtcpPacketParser::RtcpPacketParser() = default;
+RtcpPacketParser::~RtcpPacketParser() = default;
+
+bool RtcpPacketParser::Parse(const void* data, size_t length) {
+ ++processed_rtcp_packets_;
+
+ const uint8_t* const buffer = static_cast<const uint8_t*>(data);
+ const uint8_t* const buffer_end = buffer + length;
+
+ rtcp::CommonHeader header;
+ for (const uint8_t* next_packet = buffer; next_packet != buffer_end;
+ next_packet = header.NextPacket()) {
+ RTC_DCHECK_GT(buffer_end - next_packet, 0);
+ if (!header.Parse(next_packet, buffer_end - next_packet)) {
+ RTC_LOG(LS_WARNING)
+ << "Invalid rtcp header or unaligned rtcp packet at position "
+ << (next_packet - buffer);
+ return false;
+ }
+ switch (header.type()) {
+ case rtcp::App::kPacketType:
+ app_.Parse(header);
+ break;
+ case rtcp::Bye::kPacketType:
+ bye_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::ExtendedReports::kPacketType:
+ xr_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Psfb::kPacketType:
+ switch (header.fmt()) {
+ case rtcp::Fir::kFeedbackMessageType:
+ fir_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Pli::kFeedbackMessageType:
+ pli_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Psfb::kAfbMessageType:
+ if (!loss_notification_.Parse(header, &sender_ssrc_) &&
+ !remb_.Parse(header, &sender_ssrc_)) {
+ RTC_LOG(LS_WARNING) << "Unknown application layer FB message.";
+ }
+ break;
+ default:
+ RTC_LOG(LS_WARNING)
+ << "Unknown rtcp payload specific feedback type "
+ << header.fmt();
+ break;
+ }
+ break;
+ case rtcp::ReceiverReport::kPacketType:
+ receiver_report_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Rtpfb::kPacketType:
+ switch (header.fmt()) {
+ case rtcp::Nack::kFeedbackMessageType:
+ nack_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::RapidResyncRequest::kFeedbackMessageType:
+ rrr_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Tmmbn::kFeedbackMessageType:
+ tmmbn_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::Tmmbr::kFeedbackMessageType:
+ tmmbr_.Parse(header, &sender_ssrc_);
+ break;
+ case rtcp::TransportFeedback::kFeedbackMessageType:
+ transport_feedback_.Parse(header, &sender_ssrc_);
+ break;
+ default:
+ RTC_LOG(LS_WARNING)
+ << "Unknown rtcp transport feedback type " << header.fmt();
+ break;
+ }
+ break;
+ case rtcp::Sdes::kPacketType:
+ sdes_.Parse(header);
+ break;
+ case rtcp::SenderReport::kPacketType:
+ sender_report_.Parse(header, &sender_ssrc_);
+ break;
+ default:
+ RTC_LOG(LS_WARNING) << "Unknown rtcp packet type " << header.type();
+ break;
+ }
+ }
+ return true;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/rtcp_packet_parser.h b/third_party/libwebrtc/test/rtcp_packet_parser.h
new file mode 100644
index 0000000000..9e8c9685e9
--- /dev/null
+++ b/third_party/libwebrtc/test/rtcp_packet_parser.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef TEST_RTCP_PACKET_PARSER_H_
+#define TEST_RTCP_PACKET_PARSER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/app.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/bye.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/fir.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/nack.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/pli.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/remb.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/sdes.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+// Parse RTCP packet of given type. Assumes RTCP header is valid and that there
+// is exactly one packet of correct type in the buffer.
+template <typename Packet>
+bool ParseSinglePacket(const uint8_t* buffer, size_t size, Packet* packet) {
+ rtcp::CommonHeader header;
+ RTC_CHECK(header.Parse(buffer, size));
+ RTC_CHECK_EQ(size, header.NextPacket() - buffer);
+ return packet->Parse(header);
+}
+// Same function, but takes raw buffer as single argument instead of pair.
+template <typename Packet>
+bool ParseSinglePacket(rtc::ArrayView<const uint8_t> buffer, Packet* packet) {
+ return ParseSinglePacket(buffer.data(), buffer.size(), packet);
+}
+
+class RtcpPacketParser {
+ public:
+ // Keeps last parsed packet, count number of parsed packets of given type.
+ template <typename TypedRtcpPacket>
+ class PacketCounter : public TypedRtcpPacket {
+ public:
+ int num_packets() const { return num_packets_; }
+ void Parse(const rtcp::CommonHeader& header) {
+ if (TypedRtcpPacket::Parse(header))
+ ++num_packets_;
+ }
+ bool Parse(const rtcp::CommonHeader& header, uint32_t* sender_ssrc) {
+ const bool result = TypedRtcpPacket::Parse(header);
+ if (result) {
+ ++num_packets_;
+ if (*sender_ssrc == 0) // Use first sender ssrc in compound packet.
+ *sender_ssrc = TypedRtcpPacket::sender_ssrc();
+ }
+ return result;
+ }
+
+ private:
+ int num_packets_ = 0;
+ };
+
+ RtcpPacketParser();
+ ~RtcpPacketParser();
+
+ bool Parse(const void* packet, size_t packet_len);
+
+ PacketCounter<rtcp::App>* app() { return &app_; }
+ PacketCounter<rtcp::Bye>* bye() { return &bye_; }
+ PacketCounter<rtcp::ExtendedReports>* xr() { return &xr_; }
+ PacketCounter<rtcp::Fir>* fir() { return &fir_; }
+ PacketCounter<rtcp::Nack>* nack() { return &nack_; }
+ PacketCounter<rtcp::Pli>* pli() { return &pli_; }
+ PacketCounter<rtcp::RapidResyncRequest>* rrr() { return &rrr_; }
+ PacketCounter<rtcp::ReceiverReport>* receiver_report() {
+ return &receiver_report_;
+ }
+ PacketCounter<rtcp::LossNotification>* loss_notification() {
+ return &loss_notification_;
+ }
+ PacketCounter<rtcp::Remb>* remb() { return &remb_; }
+ PacketCounter<rtcp::Sdes>* sdes() { return &sdes_; }
+ PacketCounter<rtcp::SenderReport>* sender_report() { return &sender_report_; }
+ PacketCounter<rtcp::Tmmbn>* tmmbn() { return &tmmbn_; }
+ PacketCounter<rtcp::Tmmbr>* tmmbr() { return &tmmbr_; }
+ PacketCounter<rtcp::TransportFeedback>* transport_feedback() {
+ return &transport_feedback_;
+ }
+ uint32_t sender_ssrc() const { return sender_ssrc_; }
+ size_t processed_rtcp_packets() const { return processed_rtcp_packets_; }
+
+ private:
+ PacketCounter<rtcp::App> app_;
+ PacketCounter<rtcp::Bye> bye_;
+ PacketCounter<rtcp::ExtendedReports> xr_;
+ PacketCounter<rtcp::Fir> fir_;
+ PacketCounter<rtcp::Nack> nack_;
+ PacketCounter<rtcp::Pli> pli_;
+ PacketCounter<rtcp::RapidResyncRequest> rrr_;
+ PacketCounter<rtcp::ReceiverReport> receiver_report_;
+ PacketCounter<rtcp::LossNotification> loss_notification_;
+ PacketCounter<rtcp::Remb> remb_;
+ PacketCounter<rtcp::Sdes> sdes_;
+ PacketCounter<rtcp::SenderReport> sender_report_;
+ PacketCounter<rtcp::Tmmbn> tmmbn_;
+ PacketCounter<rtcp::Tmmbr> tmmbr_;
+ PacketCounter<rtcp::TransportFeedback> transport_feedback_;
+ uint32_t sender_ssrc_ = 0;
+ size_t processed_rtcp_packets_ = 0;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // TEST_RTCP_PACKET_PARSER_H_
diff --git a/third_party/libwebrtc/test/rtp_file_reader.cc b/third_party/libwebrtc/test/rtp_file_reader.cc
new file mode 100644
index 0000000000..b6f3cbbe5b
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_reader.cc
@@ -0,0 +1,691 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/rtp_file_reader.h"
+
+#include <stdio.h>
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/system/arch.h"
+
+namespace webrtc {
+namespace test {
+
+static const size_t kFirstLineLength = 80;
+static uint16_t kPacketHeaderSize = 8;
+
+#define TRY(expr) \
+ do { \
+ if (!(expr)) { \
+ RTC_LOG(LS_INFO) << "Failed to read"; \
+ return false; \
+ } \
+ } while (0)
+
+bool ReadUint32(uint32_t* out, FILE* file) {
+ *out = 0;
+ for (size_t i = 0; i < 4; ++i) {
+ *out <<= 8;
+ uint8_t tmp;
+ if (fread(&tmp, 1, sizeof(uint8_t), file) != sizeof(uint8_t))
+ return false;
+ *out |= tmp;
+ }
+ return true;
+}
+
+bool ReadUint16(uint16_t* out, FILE* file) {
+ *out = 0;
+ for (size_t i = 0; i < 2; ++i) {
+ *out <<= 8;
+ uint8_t tmp;
+ if (fread(&tmp, 1, sizeof(uint8_t), file) != sizeof(uint8_t))
+ return false;
+ *out |= tmp;
+ }
+ return true;
+}
+
+class RtpFileReaderImpl : public RtpFileReader {
+ public:
+ virtual bool Init(FILE* file, const std::set<uint32_t>& ssrc_filter) = 0;
+};
+
+class InterleavedRtpFileReader : public RtpFileReaderImpl {
+ public:
+ ~InterleavedRtpFileReader() override {
+ if (file_ != nullptr) {
+ fclose(file_);
+ file_ = nullptr;
+ }
+ }
+
+ bool Init(FILE* file, const std::set<uint32_t>& ssrc_filter) override {
+ file_ = file;
+ return true;
+ }
+
+ bool NextPacket(RtpPacket* packet) override {
+ RTC_DCHECK(file_);
+ packet->length = RtpPacket::kMaxPacketBufferSize;
+ uint32_t len = 0;
+ TRY(ReadUint32(&len, file_));
+ if (packet->length < len) {
+ RTC_FATAL() << "Packet is too large to fit: " << len << " bytes vs "
+ << packet->length
+ << " bytes allocated. Consider increasing the buffer "
+ << "size";
+ }
+ if (fread(packet->data, 1, len, file_) != len)
+ return false;
+
+ packet->length = len;
+ packet->original_length = len;
+ packet->time_ms = time_ms_;
+ time_ms_ += 5;
+ return true;
+ }
+
+ private:
+ FILE* file_ = nullptr;
+ int64_t time_ms_ = 0;
+};
+
+// Read RTP packets from file in rtpdump format, as documented at:
+// http://www.cs.columbia.edu/irt/software/rtptools/
+class RtpDumpReader : public RtpFileReaderImpl {
+ public:
+ RtpDumpReader() : file_(nullptr) {}
+ ~RtpDumpReader() override {
+ if (file_ != nullptr) {
+ fclose(file_);
+ file_ = nullptr;
+ }
+ }
+
+ RtpDumpReader(const RtpDumpReader&) = delete;
+ RtpDumpReader& operator=(const RtpDumpReader&) = delete;
+
+ bool Init(FILE* file, const std::set<uint32_t>& ssrc_filter) override {
+ file_ = file;
+
+ char firstline[kFirstLineLength + 1] = {0};
+ if (fgets(firstline, kFirstLineLength, file_) == nullptr) {
+ RTC_LOG(LS_INFO) << "Can't read from file";
+ return false;
+ }
+ if (strncmp(firstline, "#!rtpplay", 9) == 0) {
+ if (strncmp(firstline, "#!rtpplay1.0", 12) != 0) {
+ RTC_LOG(LS_INFO) << "Wrong rtpplay version, must be 1.0";
+ return false;
+ }
+ } else if (strncmp(firstline, "#!RTPencode", 11) == 0) {
+ if (strncmp(firstline, "#!RTPencode1.0", 14) != 0) {
+ RTC_LOG(LS_INFO) << "Wrong RTPencode version, must be 1.0";
+ return false;
+ }
+ } else {
+ RTC_LOG(LS_INFO) << "Wrong file format of input file";
+ return false;
+ }
+
+ uint32_t start_sec;
+ uint32_t start_usec;
+ uint32_t source;
+ uint16_t port;
+ uint16_t padding;
+ TRY(ReadUint32(&start_sec, file_));
+ TRY(ReadUint32(&start_usec, file_));
+ TRY(ReadUint32(&source, file_));
+ TRY(ReadUint16(&port, file_));
+ TRY(ReadUint16(&padding, file_));
+
+ return true;
+ }
+
+ bool NextPacket(RtpPacket* packet) override {
+ uint8_t* rtp_data = packet->data;
+ packet->length = RtpPacket::kMaxPacketBufferSize;
+
+ uint16_t len;
+ uint16_t plen;
+ uint32_t offset;
+ TRY(ReadUint16(&len, file_));
+ TRY(ReadUint16(&plen, file_));
+ TRY(ReadUint32(&offset, file_));
+
+ // Use 'len' here because a 'plen' of 0 specifies rtcp.
+ len -= kPacketHeaderSize;
+ if (packet->length < len) {
+ RTC_LOG(LS_ERROR) << "Packet is too large to fit: " << len << " bytes vs "
+ << packet->length
+ << " bytes allocated. Consider increasing the buffer "
+ "size";
+ return false;
+ }
+ if (fread(rtp_data, 1, len, file_) != len) {
+ return false;
+ }
+
+ packet->length = len;
+ packet->original_length = plen;
+ packet->time_ms = offset;
+ return true;
+ }
+
+ private:
+ FILE* file_;
+};
+
+enum {
+ kResultFail = -1,
+ kResultSuccess = 0,
+ kResultSkip = 1,
+
+ kPcapVersionMajor = 2,
+ kPcapVersionMinor = 4,
+ kLinktypeNull = 0,
+ kLinktypeEthernet = 1,
+ kBsdNullLoopback1 = 0x00000002,
+ kBsdNullLoopback2 = 0x02000000,
+ kEthernetIIHeaderMacSkip = 12,
+ kEthertypeIp = 0x0800,
+ kIpVersion4 = 4,
+ kMinIpHeaderLength = 20,
+ kFragmentOffsetClear = 0x0000,
+ kFragmentOffsetDoNotFragment = 0x4000,
+ kProtocolTcp = 0x06,
+ kProtocolUdp = 0x11,
+ kUdpHeaderLength = 8,
+ kMaxReadBufferSize = 4096
+};
+
+const uint32_t kPcapBOMSwapOrder = 0xd4c3b2a1UL;
+const uint32_t kPcapBOMNoSwapOrder = 0xa1b2c3d4UL;
+
+#define TRY_PCAP(expr) \
+ do { \
+ int r = (expr); \
+ if (r == kResultFail) { \
+ RTC_LOG(LS_INFO) << "FAIL at " << __FILE__ << ":" << __LINE__; \
+ return kResultFail; \
+ } else if (r == kResultSkip) { \
+ return kResultSkip; \
+ } \
+ } while (0)
+
+// Read RTP packets from file in tcpdump/libpcap format, as documented at:
+// http://wiki.wireshark.org/Development/LibpcapFileFormat
+class PcapReader : public RtpFileReaderImpl {
+ public:
+ PcapReader()
+ : file_(nullptr),
+ swap_pcap_byte_order_(false),
+#ifdef WEBRTC_ARCH_BIG_ENDIAN
+ swap_network_byte_order_(false),
+#else
+ swap_network_byte_order_(true),
+#endif
+ read_buffer_(),
+ packets_by_ssrc_(),
+ packets_(),
+ next_packet_it_() {
+ }
+
+ ~PcapReader() override {
+ if (file_ != nullptr) {
+ fclose(file_);
+ file_ = nullptr;
+ }
+ }
+
+ PcapReader(const PcapReader&) = delete;
+ PcapReader& operator=(const PcapReader&) = delete;
+
+ bool Init(FILE* file, const std::set<uint32_t>& ssrc_filter) override {
+ return Initialize(file, ssrc_filter) == kResultSuccess;
+ }
+
+ int Initialize(FILE* file, const std::set<uint32_t>& ssrc_filter) {
+ file_ = file;
+
+ if (ReadGlobalHeader() < 0) {
+ return kResultFail;
+ }
+
+ int total_packet_count = 0;
+ uint32_t stream_start_ms = 0;
+ int32_t next_packet_pos = ftell(file_);
+ for (;;) {
+ TRY_PCAP(fseek(file_, next_packet_pos, SEEK_SET));
+ int result = ReadPacket(&next_packet_pos, stream_start_ms,
+ ++total_packet_count, ssrc_filter);
+ if (result == kResultFail) {
+ break;
+ } else if (result == kResultSuccess && packets_.size() == 1) {
+ RTC_DCHECK_EQ(stream_start_ms, 0);
+ PacketIterator it = packets_.begin();
+ stream_start_ms = it->time_offset_ms;
+ it->time_offset_ms = 0;
+ }
+ }
+
+ if (feof(file_) == 0) {
+ printf("Failed reading file!\n");
+ return kResultFail;
+ }
+
+ printf("Total packets in file: %d\n", total_packet_count);
+ printf("Total RTP/RTCP packets: %zu\n", packets_.size());
+
+ for (SsrcMapIterator mit = packets_by_ssrc_.begin();
+ mit != packets_by_ssrc_.end(); ++mit) {
+ uint32_t ssrc = mit->first;
+ const std::vector<uint32_t>& packet_indices = mit->second;
+ int pt = packets_[packet_indices[0]].payload_type;
+ printf("SSRC: %08x, %zu packets, pt=%d\n", ssrc, packet_indices.size(),
+ pt);
+ }
+
+ // TODO(solenberg): Better validation of identified SSRC streams.
+ //
+ // Since we're dealing with raw network data here, we will wrongly identify
+ // some packets as RTP. When these packets are consumed by RtpPlayer, they
+ // are unlikely to cause issues as they will ultimately be filtered out by
+ // the RtpRtcp module. However, we should really do better filtering here,
+ // which we can accomplish in a number of ways, e.g.:
+ //
+ // - Verify that the time stamps and sequence numbers for RTP packets are
+ // both increasing/decreasing. If they move in different directions, the
+ // SSRC is likely bogus and can be dropped. (Normally they should be inc-
+ // reasing but we must allow packet reordering).
+ // - If RTP sequence number is not changing, drop the stream.
+ // - Can also use srcip:port->dstip:port pairs, assuming few SSRC collisions
+ // for up/down streams.
+
+ next_packet_it_ = packets_.begin();
+ return kResultSuccess;
+ }
+
+ bool NextPacket(RtpPacket* packet) override {
+ uint32_t length = RtpPacket::kMaxPacketBufferSize;
+ if (NextPcap(packet->data, &length, &packet->time_ms) != kResultSuccess)
+ return false;
+ packet->length = static_cast<size_t>(length);
+ packet->original_length = packet->length;
+ return true;
+ }
+
+ virtual int NextPcap(uint8_t* data, uint32_t* length, uint32_t* time_ms) {
+ RTC_DCHECK(data);
+ RTC_DCHECK(length);
+ RTC_DCHECK(time_ms);
+
+ if (next_packet_it_ == packets_.end()) {
+ return -1;
+ }
+ if (*length < next_packet_it_->payload_length) {
+ return -1;
+ }
+ TRY_PCAP(fseek(file_, next_packet_it_->pos_in_file, SEEK_SET));
+ TRY_PCAP(Read(data, next_packet_it_->payload_length));
+ *length = next_packet_it_->payload_length;
+ *time_ms = next_packet_it_->time_offset_ms;
+ next_packet_it_++;
+
+ return 0;
+ }
+
+ private:
+  // A marker of an RTP packet within the file. Only bookkeeping is stored
+  // here; the payload bytes themselves stay in the file and are re-read on
+  // demand via `pos_in_file`.
+  struct RtpPacketMarker {
+    uint32_t packet_number;  // One-based index (like in WireShark)
+    uint32_t time_offset_ms;
+    uint32_t source_ip;
+    uint32_t dest_ip;
+    uint16_t source_port;
+    uint16_t dest_port;
+    // Payload type of the RTP packet,
+    // or RTCP packet type of the first RTCP packet in a compound RTCP packet.
+    int payload_type;
+    int32_t pos_in_file;  // Byte offset of payload from start of file.
+    uint32_t payload_length;
+  };
+
+  typedef std::vector<RtpPacketMarker>::iterator PacketIterator;
+  // Maps an SSRC to the indices (into `packets_`) of the packets carrying it.
+  typedef std::map<uint32_t, std::vector<uint32_t> > SsrcMap;
+  typedef std::map<uint32_t, std::vector<uint32_t> >::iterator SsrcMapIterator;
+
+  // Parses the pcap global (file) header: byte-order magic, format version
+  // and link type. Sets `swap_pcap_byte_order_` so later header reads can be
+  // byte-swapped to host order when needed.
+  int ReadGlobalHeader() {
+    // The magic number doubles as a byte-order mark: reading it with the
+    // "swapped" value means the file was written with the opposite
+    // endianness to this host.
+    uint32_t magic;
+    TRY_PCAP(Read(&magic, false));
+    if (magic == kPcapBOMSwapOrder) {
+      swap_pcap_byte_order_ = true;
+    } else if (magic == kPcapBOMNoSwapOrder) {
+      swap_pcap_byte_order_ = false;
+    } else {
+      return kResultFail;
+    }
+
+    // Only the one pcap version this parser was written against is accepted.
+    uint16_t version_major;
+    uint16_t version_minor;
+    TRY_PCAP(Read(&version_major, false));
+    TRY_PCAP(Read(&version_minor, false));
+    if (version_major != kPcapVersionMajor ||
+        version_minor != kPcapVersionMinor) {
+      return kResultFail;
+    }
+
+    // The remaining global-header fields are read to advance the file
+    // position; only `network` is actually validated.
+    int32_t this_zone;  // GMT to local correction.
+    uint32_t sigfigs;   // Accuracy of timestamps.
+    uint32_t snaplen;   // Max length of captured packets, in octets.
+    uint32_t network;   // Data link type.
+    TRY_PCAP(Read(&this_zone, false));
+    TRY_PCAP(Read(&sigfigs, false));
+    TRY_PCAP(Read(&snaplen, false));
+    TRY_PCAP(Read(&network, false));
+
+    // Accept only LINKTYPE_NULL and LINKTYPE_ETHERNET.
+    // See: http://www.tcpdump.org/linktypes.html
+    if (network != kLinktypeNull && network != kLinktypeEthernet) {
+      return kResultFail;
+    }
+
+    return kResultSuccess;
+  }
+
+  // Reads one pcap record starting at the current file position. On return,
+  // `*next_packet_pos` points just past this record so the caller can
+  // continue scanning even when the record is skipped. Records that parse as
+  // RTP (and pass `ssrc_filter`) or RTCP are appended to `packets_`;
+  // everything else yields kResultSkip.
+  int ReadPacket(int32_t* next_packet_pos,
+                 uint32_t stream_start_ms,
+                 uint32_t number,
+                 const std::set<uint32_t>& ssrc_filter) {
+    RTC_DCHECK(next_packet_pos);
+
+    // Per-record pcap header.
+    uint32_t ts_sec;    // Timestamp seconds.
+    uint32_t ts_usec;   // Timestamp microseconds.
+    uint32_t incl_len;  // Number of octets of packet saved in file.
+    uint32_t orig_len;  // Actual length of packet.
+    TRY_PCAP(Read(&ts_sec, false));
+    TRY_PCAP(Read(&ts_usec, false));
+    TRY_PCAP(Read(&incl_len, false));
+    TRY_PCAP(Read(&orig_len, false));
+
+    // The captured frame occupies the next `incl_len` bytes.
+    *next_packet_pos = ftell(file_) + incl_len;
+
+    RtpPacketMarker marker = {0};
+    marker.packet_number = number;
+    marker.time_offset_ms = CalcTimeDelta(ts_sec, ts_usec, stream_start_ms);
+    // Walk the link-layer and IP/UDP headers; leaves the file position at the
+    // start of the UDP payload and fills in addresses/ports/payload_length.
+    TRY_PCAP(ReadPacketHeader(&marker));
+    marker.pos_in_file = ftell(file_);
+
+    if (marker.payload_length > sizeof(read_buffer_)) {
+      printf("Packet too large!\n");
+      return kResultFail;
+    }
+    TRY_PCAP(Read(read_buffer_, marker.payload_length));
+
+    rtc::ArrayView<const uint8_t> packet(read_buffer_, marker.payload_length);
+    if (IsRtcpPacket(packet)) {
+      // For RTCP, byte 1 is the packet type of the first packet in the
+      // (possibly compound) RTCP payload.
+      marker.payload_type = packet[1];
+      packets_.push_back(marker);
+    } else if (IsRtpPacket(packet)) {
+      uint32_t ssrc = ParseRtpSsrc(packet);
+      marker.payload_type = ParseRtpPayloadType(packet);
+      // An empty filter means "accept every SSRC".
+      if (ssrc_filter.empty() || ssrc_filter.find(ssrc) != ssrc_filter.end()) {
+        packets_by_ssrc_[ssrc].push_back(
+            static_cast<uint32_t>(packets_.size()));
+        packets_.push_back(marker);
+      } else {
+        return kResultSkip;
+      }
+    } else {
+      RTC_LOG(LS_INFO) << "Not recognized as RTP/RTCP";
+      return kResultSkip;
+    }
+
+    return kResultSuccess;
+  }
+
+  // Identifies the link-layer framing of the current record and parses the
+  // IP/UDP headers behind it. Tries a BSD null/loopback header first, then
+  // rewinds and tries Ethernet II. Returns kResultSkip when neither framing
+  // (or the inner IP/UDP parse) matches.
+  int ReadPacketHeader(RtpPacketMarker* marker) {
+    int32_t file_pos = ftell(file_);
+
+    // Check for BSD null/loopback frame header. The header is just 4 bytes in
+    // native byte order, so we check for both versions as we don't care about
+    // the header as such and will likely fail reading the IP header if this is
+    // something else than null/loopback.
+    uint32_t protocol;
+    TRY_PCAP(Read(&protocol, true));
+    if (protocol == kBsdNullLoopback1 || protocol == kBsdNullLoopback2) {
+      int result = ReadXxpIpHeader(marker);
+      RTC_LOG(LS_INFO) << "Recognized loopback frame";
+      if (result != kResultSkip) {
+        return result;
+      }
+    }
+
+    // Not loopback (or the IP parse said skip): rewind and retry as Ethernet.
+    TRY_PCAP(fseek(file_, file_pos, SEEK_SET));
+
+    // Check for Ethernet II, IP frame header.
+    uint16_t type;
+    TRY_PCAP(Skip(kEthernetIIHeaderMacSkip));  // Source+destination MAC.
+    TRY_PCAP(Read(&type, true));
+    if (type == kEthertypeIp) {
+      int result = ReadXxpIpHeader(marker);
+      RTC_LOG(LS_INFO) << "Recognized ethernet 2 frame";
+      if (result != kResultSkip) {
+        return result;
+      }
+    }
+
+    return kResultSkip;
+  }
+
+  // Converts a pcap capture timestamp (seconds + microseconds) into a
+  // millisecond offset from `start_ms`, rounding to the nearest ms and
+  // clamping to zero when the timestamp precedes the stream start.
+  uint32_t CalcTimeDelta(uint32_t ts_sec, uint32_t ts_usec, uint32_t start_ms) {
+    const uint64_t total_us =
+        static_cast<uint64_t>(ts_sec) * 1000000 + ts_usec;
+    const uint64_t packet_ms = (total_us + 500) / 1000;  // Round half up.
+    const uint64_t base_ms = static_cast<uint64_t>(start_ms);
+    return packet_ms < base_ms ? 0 : packet_ms - base_ms;
+  }
+
+  // Parses an IPv4 header followed by a UDP header, filling in the
+  // addresses, ports and payload length of `marker`. Each Read() below
+  // consumes two octets of the IP header, so `version` holds
+  // version|IHL|TOS, `protocol` holds TTL|protocol, etc. Returns kResultSkip
+  // for non-IPv4, fragmented, or non-UDP packets.
+  int ReadXxpIpHeader(RtpPacketMarker* marker) {
+    RTC_DCHECK(marker);
+
+    uint16_t version;
+    uint16_t length;
+    uint16_t id;
+    uint16_t fragment;
+    uint16_t protocol;
+    uint16_t checksum;
+    TRY_PCAP(Read(&version, true));
+    TRY_PCAP(Read(&length, true));
+    TRY_PCAP(Read(&id, true));
+    TRY_PCAP(Read(&fragment, true));
+    TRY_PCAP(Read(&protocol, true));
+    TRY_PCAP(Read(&checksum, true));
+    TRY_PCAP(Read(&marker->source_ip, true));
+    TRY_PCAP(Read(&marker->dest_ip, true));
+
+    // IP version lives in the top nibble of the first octet.
+    if (((version >> 12) & 0x000f) != kIpVersion4) {
+      RTC_LOG(LS_INFO) << "IP header is not IPv4";
+      return kResultSkip;
+    }
+
+    if (fragment != kFragmentOffsetClear &&
+        fragment != kFragmentOffsetDoNotFragment) {
+      RTC_LOG(LS_INFO) << "IP fragments cannot be handled";
+      return kResultSkip;
+    }
+
+    // Skip remaining fields of IP header.
+    // IHL (bits 8..11 of `version`) counts 32-bit words; ">> (8 - 2)" both
+    // extracts it and multiplies by 4 to get the header length in bytes.
+    uint16_t header_length = (version & 0x0f00) >> (8 - 2);
+    RTC_DCHECK_GE(header_length, kMinIpHeaderLength);
+    TRY_PCAP(Skip(header_length - kMinIpHeaderLength));
+
+    // Low octet of the TTL|protocol pair.
+    protocol = protocol & 0x00ff;
+    if (protocol == kProtocolTcp) {
+      RTC_LOG(LS_INFO) << "TCP packets are not handled";
+      return kResultSkip;
+    } else if (protocol == kProtocolUdp) {
+      // These shadow the IP-level `length`/`checksum` above on purpose: they
+      // are the UDP header's own fields.
+      uint16_t length;
+      uint16_t checksum;
+      TRY_PCAP(Read(&marker->source_port, true));
+      TRY_PCAP(Read(&marker->dest_port, true));
+      TRY_PCAP(Read(&length, true));
+      TRY_PCAP(Read(&checksum, true));
+      // UDP length covers the 8-byte UDP header; subtract it out.
+      marker->payload_length = length - kUdpHeaderLength;
+    } else {
+      RTC_LOG(LS_INFO) << "Unknown transport (expected UDP or TCP)";
+      return kResultSkip;
+    }
+
+    return kResultSuccess;
+  }
+
+  // Reads a 32-bit value from the file. `expect_network_order` selects which
+  // swap flag applies: network-order fields (IP/UDP headers) versus
+  // pcap-header fields, which are written in the capturing host's order.
+  int Read(uint32_t* out, bool expect_network_order) {
+    uint32_t tmp = 0;
+    if (fread(&tmp, 1, sizeof(uint32_t), file_) != sizeof(uint32_t)) {
+      return kResultFail;
+    }
+    if ((!expect_network_order && swap_pcap_byte_order_) ||
+        (expect_network_order && swap_network_byte_order_)) {
+      // Manual 32-bit byte swap (bswap32).
+      tmp = ((tmp >> 24) & 0x000000ff) | (tmp << 24) |
+            ((tmp >> 8) & 0x0000ff00) | ((tmp << 8) & 0x00ff0000);
+    }
+    *out = tmp;
+    return kResultSuccess;
+  }
+
+  // 16-bit variant of Read(uint32_t*, bool); see that overload for the
+  // meaning of `expect_network_order`.
+  int Read(uint16_t* out, bool expect_network_order) {
+    uint16_t tmp = 0;
+    if (fread(&tmp, 1, sizeof(uint16_t), file_) != sizeof(uint16_t)) {
+      return kResultFail;
+    }
+    if ((!expect_network_order && swap_pcap_byte_order_) ||
+        (expect_network_order && swap_network_byte_order_)) {
+      // Manual 16-bit byte swap (bswap16).
+      tmp = ((tmp >> 8) & 0x00ff) | (tmp << 8);
+    }
+    *out = tmp;
+    return kResultSuccess;
+  }
+
+  // Reads exactly `count` raw bytes into `out`; no byte swapping applies.
+  int Read(uint8_t* out, uint32_t count) {
+    const size_t bytes_read = fread(out, 1, count, file_);
+    return bytes_read == count ? kResultSuccess : kResultFail;
+  }
+
+  // Signed 32-bit variant of Read(uint32_t*, bool). The read and the
+  // byte swap are delegated to the unsigned overload: performing the shift
+  // twiddling directly on an int32_t (as a naive implementation would) is
+  // undefined behavior when the value is negative (`tmp << 24` overflows a
+  // signed int).
+  int Read(int32_t* out, bool expect_network_order) {
+    uint32_t tmp = 0;
+    if (Read(&tmp, expect_network_order) != kResultSuccess) {
+      return kResultFail;
+    }
+    // Reinterpret the (possibly swapped) bit pattern as a signed value.
+    *out = static_cast<int32_t>(tmp);
+    return kResultSuccess;
+  }
+
+  // Advances the file read position by `length` bytes without reading them.
+  int Skip(uint32_t length) {
+    return fseek(file_, length, SEEK_CUR) == 0 ? kResultSuccess : kResultFail;
+  }
+
+  FILE* file_;  // pcap input file. NOTE(review): ownership/close is handled
+                // outside this snippet — confirm against the destructor.
+  bool swap_pcap_byte_order_;  // Set from the magic number in the file header.
+  const bool swap_network_byte_order_;  // Host vs network byte order.
+  // Scratch buffer used while indexing and validating packet payloads.
+  uint8_t read_buffer_[kMaxReadBufferSize];
+
+  SsrcMap packets_by_ssrc_;  // SSRC -> indices into `packets_`.
+  std::vector<RtpPacketMarker> packets_;  // Index of all accepted packets.
+  PacketIterator next_packet_it_;  // Cursor used by NextPcap().
+};
+
+// Maps the requested file format onto a freshly allocated concrete reader.
+// Caller takes ownership of the returned object.
+RtpFileReaderImpl* CreateReaderForFormat(RtpFileReader::FileFormat format) {
+  switch (format) {
+    case RtpFileReader::kPcap:
+      return new PcapReader();
+    case RtpFileReader::kRtpDump:
+      return new RtpDumpReader();
+    case RtpFileReader::kLengthPacketInterleaved:
+      return new InterleavedRtpFileReader();
+  }
+  return nullptr;
+}
+
+// Creates a reader over an in-memory dump. The buffer is spooled into a
+// temporary file so the file-based parsers can be reused unchanged. Returns
+// nullptr (and prints an error) on failure; on success the caller owns the
+// returned reader.
+RtpFileReader* RtpFileReader::Create(FileFormat format,
+                                     const uint8_t* data,
+                                     size_t size,
+                                     const std::set<uint32_t>& ssrc_filter) {
+  std::unique_ptr<RtpFileReaderImpl> reader(CreateReaderForFormat(format));
+
+  FILE* file = tmpfile();
+  if (file == nullptr) {
+    printf("ERROR: Can't open file from memory buffer\n");
+    return nullptr;
+  }
+
+  if (fwrite(reinterpret_cast<const void*>(data), sizeof(uint8_t), size,
+             file) != size) {
+    // The reader has not taken ownership of `file` yet; close it here so the
+    // temporary file handle is not leaked on a short write.
+    fclose(file);
+    return nullptr;
+  }
+  rewind(file);
+
+  // NOTE(review): Init() is assumed to take ownership of `file` (success or
+  // failure) — confirm against the reader implementations.
+  if (!reader->Init(file, ssrc_filter)) {
+    return nullptr;
+  }
+  return reader.release();
+}
+
+// Creates a reader over an on-disk dump. Returns nullptr (and prints an
+// error) when the file cannot be opened or parsed; on success the caller
+// owns the returned reader. The reader is held in a unique_ptr to mirror the
+// memory-buffer overload and make the failure path leak-free without a
+// manual delete.
+RtpFileReader* RtpFileReader::Create(FileFormat format,
+                                     absl::string_view filename,
+                                     const std::set<uint32_t>& ssrc_filter) {
+  std::unique_ptr<RtpFileReaderImpl> reader(CreateReaderForFormat(format));
+  std::string filename_str = std::string(filename);
+  FILE* file = fopen(filename_str.c_str(), "rb");
+  if (file == nullptr) {
+    printf("ERROR: Can't open file: %s\n", filename_str.c_str());
+    return nullptr;
+  }
+
+  if (!reader->Init(file, ssrc_filter)) {
+    return nullptr;
+  }
+  return reader.release();
+}
+
+// Convenience overload: open `filename` with no SSRC filtering.
+RtpFileReader* RtpFileReader::Create(FileFormat format,
+                                     absl::string_view filename) {
+  const std::set<uint32_t> no_filter;
+  return RtpFileReader::Create(format, filename, no_filter);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/rtp_file_reader.h b/third_party/libwebrtc/test/rtp_file_reader.h
new file mode 100644
index 0000000000..4e26c71baa
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_reader.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_RTP_FILE_READER_H_
+#define TEST_RTP_FILE_READER_H_
+
+#include <cstddef>
+#include <cstdint>
+#include <set>
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+namespace test {
+
+// A single RTP/RTCP packet as returned by RtpFileReader::NextPacket().
+struct RtpPacket {
+  // Accommodate for 50 ms packets of 32 kHz PCM16 samples (3200 bytes) plus
+  // some overhead.
+  static const size_t kMaxPacketBufferSize = 3500;
+  uint8_t data[kMaxPacketBufferSize];  // Packet bytes; first `length` valid.
+  size_t length;
+  // The length the packet had on wire. Will be different from `length` when
+  // reading a header-only RTP dump.
+  size_t original_length;
+
+  // Offset in milliseconds from the start of the recorded stream.
+  uint32_t time_ms;
+};
+
+// Sequential reader for recorded RTP streams in pcap, rtpdump or
+// length-interleaved format. Create() returns a heap-allocated reader owned
+// by the caller, or null on failure.
+class RtpFileReader {
+ public:
+  enum FileFormat { kPcap, kRtpDump, kLengthPacketInterleaved };
+
+  virtual ~RtpFileReader() {}
+  // Reads a dump held in a memory buffer, keeping only packets whose SSRC is
+  // in `ssrc_filter` (an empty filter accepts everything).
+  static RtpFileReader* Create(FileFormat format,
+                               const uint8_t* data,
+                               size_t size,
+                               const std::set<uint32_t>& ssrc_filter);
+  static RtpFileReader* Create(FileFormat format, absl::string_view filename);
+  static RtpFileReader* Create(FileFormat format,
+                               absl::string_view filename,
+                               const std::set<uint32_t>& ssrc_filter);
+  // Fills `packet` with the next packet; returns false at end of file.
+  virtual bool NextPacket(RtpPacket* packet) = 0;
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_RTP_FILE_READER_H_
diff --git a/third_party/libwebrtc/test/rtp_file_reader_unittest.cc b/third_party/libwebrtc/test/rtp_file_reader_unittest.cc
new file mode 100644
index 0000000000..995d9fbc9d
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_reader_unittest.cc
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/rtp_file_reader.h"
+
+#include <map>
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+
+// Fixture for reading rtpdump resource files from resources/video_coding/.
+class TestRtpFileReader : public ::testing::Test {
+ public:
+  // Opens `filename`.rtp; `headers_only_file` selects which length invariant
+  // CountRtpPackets() checks for each packet.
+  void Init(const std::string& filename, bool headers_only_file) {
+    std::string filepath =
+        test::ResourcePath("video_coding/" + filename, "rtp");
+    rtp_packet_source_.reset(
+        test::RtpFileReader::Create(test::RtpFileReader::kRtpDump, filepath));
+    ASSERT_TRUE(rtp_packet_source_.get() != NULL);
+    headers_only_file_ = headers_only_file;
+  }
+
+  // Drains the reader, validating length vs. original_length per packet, and
+  // returns the number of packets read.
+  int CountRtpPackets() {
+    test::RtpPacket packet;
+    int c = 0;
+    while (rtp_packet_source_->NextPacket(&packet)) {
+      if (headers_only_file_)
+        EXPECT_LT(packet.length, packet.original_length);
+      else
+        EXPECT_EQ(packet.length, packet.original_length);
+      c++;
+    }
+    return c;
+  }
+
+ private:
+  std::unique_ptr<test::RtpFileReader> rtp_packet_source_;
+  bool headers_only_file_;
+};
+
+// Golden packet counts for the checked-in rtpdump resources.
+TEST_F(TestRtpFileReader, Test60Packets) {
+  Init("pltype103", false);
+  EXPECT_EQ(60, CountRtpPackets());
+}
+
+TEST_F(TestRtpFileReader, Test60PacketsHeaderOnly) {
+  Init("pltype103_header_only", true);
+  EXPECT_EQ(60, CountRtpPackets());
+}
+
+typedef std::map<uint32_t, int> PacketsPerSsrc;
+
+// Fixture for reading pcap resource files from resources/video_coding/.
+class TestPcapFileReader : public ::testing::Test {
+ public:
+  // Opens `filename`.pcap via the kPcap reader.
+  void Init(const std::string& filename) {
+    std::string filepath =
+        test::ResourcePath("video_coding/" + filename, "pcap");
+    rtp_packet_source_.reset(
+        test::RtpFileReader::Create(test::RtpFileReader::kPcap, filepath));
+    ASSERT_TRUE(rtp_packet_source_.get() != NULL);
+  }
+
+  // Drains the reader and returns the total packet count; pcap payloads are
+  // stored in full, so length must equal original_length.
+  int CountRtpPackets() {
+    int c = 0;
+    test::RtpPacket packet;
+    while (rtp_packet_source_->NextPacket(&packet)) {
+      EXPECT_EQ(packet.length, packet.original_length);
+      c++;
+    }
+    return c;
+  }
+
+  // Tallies RTP packets per SSRC (RTCP packets are ignored).
+  PacketsPerSsrc CountRtpPacketsPerSsrc() {
+    PacketsPerSsrc pps;
+    test::RtpPacket packet;
+    while (rtp_packet_source_->NextPacket(&packet)) {
+      rtc::ArrayView<const uint8_t> raw(packet.data, packet.length);
+      if (IsRtpPacket(raw)) {
+        pps[ParseRtpSsrc(raw)]++;
+      }
+    }
+    return pps;
+  }
+
+ private:
+  std::unique_ptr<test::RtpFileReader> rtp_packet_source_;
+};
+
+// Golden packet counts and per-SSRC tallies for the checked-in pcap captures.
+TEST_F(TestPcapFileReader, TestEthernetIIFrame) {
+  Init("frame-ethernet-ii");
+  EXPECT_EQ(368, CountRtpPackets());
+}
+
+TEST_F(TestPcapFileReader, TestLoopbackFrame) {
+  Init("frame-loopback");
+  EXPECT_EQ(491, CountRtpPackets());
+}
+
+TEST_F(TestPcapFileReader, TestTwoSsrc) {
+  Init("ssrcs-2");
+  PacketsPerSsrc pps = CountRtpPacketsPerSsrc();
+  EXPECT_EQ(2UL, pps.size());
+  EXPECT_EQ(370, pps[0x78d48f61]);
+  EXPECT_EQ(60, pps[0xae94130b]);
+}
+
+TEST_F(TestPcapFileReader, TestThreeSsrc) {
+  Init("ssrcs-3");
+  PacketsPerSsrc pps = CountRtpPacketsPerSsrc();
+  EXPECT_EQ(3UL, pps.size());
+  EXPECT_EQ(162, pps[0x938c5eaa]);
+  EXPECT_EQ(113, pps[0x59fe6ef0]);
+  EXPECT_EQ(61, pps[0xed2bd2ac]);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/rtp_file_writer.cc b/third_party/libwebrtc/test/rtp_file_writer.cc
new file mode 100644
index 0000000000..22f664abc8
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_writer.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/rtp_file_writer.h"
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+static const uint16_t kPacketHeaderSize = 8;
+static const char kFirstLine[] = "#!rtpplay1.0 0.0.0.0/0\n";
+
+// Write RTP packets to file in rtpdump format, as documented at:
+// http://www.cs.columbia.edu/irt/software/rtptools/
+class RtpDumpWriter : public RtpFileWriter {
+ public:
+  // Takes ownership of `file` and writes the rtpdump file preamble.
+  explicit RtpDumpWriter(FILE* file) : file_(file) {
+    RTC_CHECK(file_ != NULL);
+    Init();
+  }
+  ~RtpDumpWriter() override {
+    if (file_ != NULL) {
+      fclose(file_);
+      file_ = NULL;
+    }
+  }
+
+  RtpDumpWriter(const RtpDumpWriter&) = delete;
+  RtpDumpWriter& operator=(const RtpDumpWriter&) = delete;
+
+  // Appends one packet record: 8-byte rtpdump header (record length including
+  // the header, on-wire length, ms offset from the first packet) followed by
+  // the packet bytes. Time offsets are relative to the first packet written.
+  // NOTE(review): lengths are narrowed to uint16_t — packets larger than
+  // 65535-8 bytes would be recorded truncated; confirm callers never exceed
+  // RtpPacket::kMaxPacketBufferSize (3500).
+  bool WritePacket(const RtpPacket* packet) override {
+    if (!first_packet_time_) {
+      first_packet_time_ = packet->time_ms;
+    }
+    uint16_t len = static_cast<uint16_t>(packet->length + kPacketHeaderSize);
+    uint16_t plen = static_cast<uint16_t>(packet->original_length);
+    uint32_t offset = packet->time_ms - *first_packet_time_;
+    RTC_CHECK(WriteUint16(len));
+    RTC_CHECK(WriteUint16(plen));
+    RTC_CHECK(WriteUint32(offset));
+    return fwrite(packet->data, sizeof(uint8_t), packet->length, file_) ==
+           packet->length;
+  }
+
+ private:
+  // Writes the rtpdump preamble: the "#!rtpplay" text line followed by a
+  // 16-byte binary header (start time, source address and port), all zeroed
+  // here since they are not used by the readers.
+  bool Init() {
+    fprintf(file_, "%s", kFirstLine);
+
+    RTC_CHECK(WriteUint32(0));
+    RTC_CHECK(WriteUint32(0));
+    RTC_CHECK(WriteUint32(0));
+    RTC_CHECK(WriteUint16(0));
+    RTC_CHECK(WriteUint16(0));
+
+    return true;
+  }
+
+  // Writes `in` in big-endian (network) byte order.
+  bool WriteUint32(uint32_t in) {
+    // Loop through shifts = {24, 16, 8, 0}.
+    for (int shifts = 24; shifts >= 0; shifts -= 8) {
+      uint8_t tmp = static_cast<uint8_t>((in >> shifts) & 0xFF);
+      if (fwrite(&tmp, sizeof(uint8_t), 1, file_) != 1)
+        return false;
+    }
+    return true;
+  }
+
+  // Writes `in` in big-endian (network) byte order.
+  bool WriteUint16(uint16_t in) {
+    // Write 8 MSBs.
+    uint8_t tmp = static_cast<uint8_t>((in >> 8) & 0xFF);
+    if (fwrite(&tmp, sizeof(uint8_t), 1, file_) != 1)
+      return false;
+    // Write 8 LSBs.
+    tmp = static_cast<uint8_t>(in & 0xFF);
+    if (fwrite(&tmp, sizeof(uint8_t), 1, file_) != 1)
+      return false;
+    return true;
+  }
+
+  FILE* file_;  // Owned; closed in the destructor.
+  absl::optional<uint32_t> first_packet_time_;  // time_ms of first packet.
+};
+
+// Opens `filename` for binary writing and wraps it in a writer for `format`.
+// Returns NULL when the file cannot be opened or the format is unknown; on
+// success the returned writer owns the file handle.
+RtpFileWriter* RtpFileWriter::Create(FileFormat format,
+                                     const std::string& filename) {
+  FILE* file = fopen(filename.c_str(), "wb");
+  if (file == NULL) {
+    printf("ERROR: Can't open file: %s\n", filename.c_str());
+    return NULL;
+  }
+  if (format == kRtpDump) {
+    return new RtpDumpWriter(file);
+  }
+  fclose(file);
+  return NULL;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/rtp_file_writer.h b/third_party/libwebrtc/test/rtp_file_writer.h
new file mode 100644
index 0000000000..5e560d7375
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_writer.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_RTP_FILE_WRITER_H_
+#define TEST_RTP_FILE_WRITER_H_
+
+#include <string>
+
+#include "test/rtp_file_reader.h"
+
+namespace webrtc {
+namespace test {
+// Writer for recorded RTP streams; currently only the rtpdump format is
+// supported. Create() returns a heap-allocated writer owned by the caller,
+// or null on failure.
+class RtpFileWriter {
+ public:
+  enum FileFormat {
+    kRtpDump,
+  };
+
+  virtual ~RtpFileWriter() {}
+  static RtpFileWriter* Create(FileFormat format, const std::string& filename);
+
+  // Appends one packet to the file; returns false on a write failure.
+  virtual bool WritePacket(const RtpPacket* packet) = 0;
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_RTP_FILE_WRITER_H_
diff --git a/third_party/libwebrtc/test/rtp_file_writer_unittest.cc b/third_party/libwebrtc/test/rtp_file_writer_unittest.cc
new file mode 100644
index 0000000000..2396d7c346
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_file_writer_unittest.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/rtp_file_writer.h"
+
+#include <stdint.h>
+#include <string.h>
+
+#include <memory>
+
+#include "test/gtest.h"
+#include "test/rtp_file_reader.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+
+// Round-trip fixture: writes synthetic packets with RtpFileWriter, then reads
+// them back with RtpFileReader and verifies the contents.
+class RtpFileWriterTest : public ::testing::Test {
+ public:
+  void Init(const std::string& filename) {
+    filename_ = test::OutputPath() + filename;
+    rtp_writer_.reset(
+        test::RtpFileWriter::Create(test::RtpFileWriter::kRtpDump, filename_));
+  }
+
+  // Writes packets 1..num_packets where packet i has length i, is filled with
+  // the byte value i, and has time_ms = i + time_ms_offset. The offset lets
+  // tests verify that stored times are relative to the first packet.
+  void WriteRtpPackets(int num_packets, int time_ms_offset = 0) {
+    ASSERT_TRUE(rtp_writer_.get() != NULL);
+    test::RtpPacket packet;
+    for (int i = 1; i <= num_packets; ++i) {
+      packet.length = i;
+      packet.original_length = i;
+      packet.time_ms = i + time_ms_offset;
+      memset(packet.data, i, packet.length);
+      EXPECT_TRUE(rtp_writer_->WritePacket(&packet));
+    }
+  }
+
+  // Destroys the writer, which flushes and closes the output file.
+  void CloseOutputFile() { rtp_writer_.reset(); }
+
+  // Reads the file back and checks each packet against the pattern written
+  // by WriteRtpPackets(); time_ms is expected to be i - 1 because the reader
+  // reports offsets from the first packet regardless of the write offset.
+  void VerifyFileContents(int expected_packets) {
+    ASSERT_TRUE(rtp_writer_.get() == NULL)
+        << "Must call CloseOutputFile before VerifyFileContents";
+    std::unique_ptr<test::RtpFileReader> rtp_reader(
+        test::RtpFileReader::Create(test::RtpFileReader::kRtpDump, filename_));
+    ASSERT_TRUE(rtp_reader.get() != NULL);
+    test::RtpPacket packet;
+    int i = 0;
+    while (rtp_reader->NextPacket(&packet)) {
+      ++i;
+      EXPECT_EQ(static_cast<size_t>(i), packet.length);
+      EXPECT_EQ(static_cast<size_t>(i), packet.original_length);
+      EXPECT_EQ(static_cast<uint32_t>(i - 1), packet.time_ms);
+      for (int j = 0; j < i; ++j) {
+        EXPECT_EQ(i, packet.data[j]);
+      }
+    }
+    EXPECT_EQ(expected_packets, i);
+  }
+
+ private:
+  std::unique_ptr<test::RtpFileWriter> rtp_writer_;
+  std::string filename_;
+};
+
+// Round trip with time offsets starting at 0 and at 100; the stored offsets
+// are relative to the first packet, so both verify identically.
+TEST_F(RtpFileWriterTest, WriteToRtpDump) {
+  Init("test_rtp_file_writer.rtp");
+  WriteRtpPackets(10);
+  CloseOutputFile();
+  VerifyFileContents(10);
+}
+
+TEST_F(RtpFileWriterTest, WriteToRtpDumpWithOffset) {
+  Init("test_rtp_file_writer.rtp");
+  WriteRtpPackets(10, 100);
+  CloseOutputFile();
+  VerifyFileContents(10);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/rtp_rtcp_observer.h b/third_party/libwebrtc/test/rtp_rtcp_observer.h
new file mode 100644
index 0000000000..06a438e712
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_rtcp_observer.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_RTP_RTCP_OBSERVER_H_
+#define TEST_RTP_RTCP_OBSERVER_H_
+
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/test/simulated_network.h"
+#include "api/units/time_delta.h"
+#include "call/simulated_packet_receiver.h"
+#include "call/video_send_stream.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+#include "rtc_base/event.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/direct_transport.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+// Timeout used by RtpRtcpObserver::Wait() when the WebRTC-QuickPerfTest
+// field trial is enabled. Declared `inline constexpr` rather than inside an
+// unnamed namespace: an unnamed namespace in a header gives every including
+// translation unit its own internal-linkage copy (Core Guidelines SF.21).
+inline constexpr TimeDelta kShortTimeout = TimeDelta::Millis(500);
+}  // namespace test
+}  // namespace webrtc
+
+namespace webrtc {
+namespace test {
+
+class PacketTransport;
+
+// Base class for tests that intercept RTP/RTCP traffic. Subclasses override
+// the On*Rtp/Rtcp hooks to inspect packets and decide per packet whether it
+// is forwarded or dropped, and signal `observation_complete_` when the test
+// condition has been observed.
+class RtpRtcpObserver {
+ public:
+  enum Action {
+    SEND_PACKET,
+    DROP_PACKET,
+  };
+
+  virtual ~RtpRtcpObserver() {}
+
+  // Blocks until the observation completes or the timeout expires. Under the
+  // WebRTC-QuickPerfTest field trial a short wait is used and the result is
+  // always reported as success.
+  virtual bool Wait() {
+    if (field_trial::IsEnabled("WebRTC-QuickPerfTest")) {
+      observation_complete_.Wait(kShortTimeout);
+      return true;
+    }
+    return observation_complete_.Wait(timeout_);
+  }
+
+  // Per-packet hooks; the default implementations forward everything.
+  virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
+    return SEND_PACKET;
+  }
+
+  virtual Action OnSendRtcp(const uint8_t* packet, size_t length) {
+    return SEND_PACKET;
+  }
+
+  virtual Action OnReceiveRtp(const uint8_t* packet, size_t length) {
+    return SEND_PACKET;
+  }
+
+  virtual Action OnReceiveRtcp(const uint8_t* packet, size_t length) {
+    return SEND_PACKET;
+  }
+
+ protected:
+  RtpRtcpObserver() : RtpRtcpObserver(TimeDelta::Zero()) {}
+  explicit RtpRtcpObserver(TimeDelta event_timeout) : timeout_(event_timeout) {}
+
+  // Set by subclasses when the awaited condition has been observed.
+  rtc::Event observation_complete_;
+
+ private:
+  const TimeDelta timeout_;
+};
+
+// DirectTransport that routes every outgoing packet through an
+// RtpRtcpObserver before (optionally) forwarding it. `transport_type`
+// selects whether the observer's OnSend* or OnReceive* hooks are invoked.
+class PacketTransport : public test::DirectTransport {
+ public:
+  enum TransportType { kReceiver, kSender };
+
+  PacketTransport(TaskQueueBase* task_queue,
+                  Call* send_call,
+                  RtpRtcpObserver* observer,
+                  TransportType transport_type,
+                  const std::map<uint8_t, MediaType>& payload_type_map,
+                  std::unique_ptr<SimulatedPacketReceiverInterface> nw_pipe,
+                  rtc::ArrayView<const RtpExtension> audio_extensions,
+                  rtc::ArrayView<const RtpExtension> video_extensions)
+      : test::DirectTransport(task_queue,
+                              std::move(nw_pipe),
+                              send_call,
+                              payload_type_map,
+                              audio_extensions,
+                              video_extensions),
+        observer_(observer),
+        transport_type_(transport_type) {}
+
+ private:
+  // Consults the observer, then either drops the packet (reporting success to
+  // the caller) or forwards it through the underlying DirectTransport.
+  bool SendRtp(const uint8_t* packet,
+               size_t length,
+               const PacketOptions& options) override {
+    EXPECT_TRUE(IsRtpPacket(rtc::MakeArrayView(packet, length)));
+    RtpRtcpObserver::Action action = RtpRtcpObserver::SEND_PACKET;
+    if (observer_) {
+      if (transport_type_ == kSender) {
+        action = observer_->OnSendRtp(packet, length);
+      } else {
+        action = observer_->OnReceiveRtp(packet, length);
+      }
+    }
+    switch (action) {
+      case RtpRtcpObserver::DROP_PACKET:
+        // Drop packet silently.
+        return true;
+      case RtpRtcpObserver::SEND_PACKET:
+        return test::DirectTransport::SendRtp(packet, length, options);
+    }
+    return true;  // Will never happen, makes compiler happy.
+  }
+
+  // RTCP counterpart of SendRtp() above.
+  bool SendRtcp(const uint8_t* packet, size_t length) override {
+    EXPECT_TRUE(IsRtcpPacket(rtc::MakeArrayView(packet, length)));
+    RtpRtcpObserver::Action action = RtpRtcpObserver::SEND_PACKET;
+    if (observer_) {
+      if (transport_type_ == kSender) {
+        action = observer_->OnSendRtcp(packet, length);
+      } else {
+        action = observer_->OnReceiveRtcp(packet, length);
+      }
+    }
+    switch (action) {
+      case RtpRtcpObserver::DROP_PACKET:
+        // Drop packet silently.
+        return true;
+      case RtpRtcpObserver::SEND_PACKET:
+        return test::DirectTransport::SendRtcp(packet, length);
+    }
+    return true;  // Will never happen, makes compiler happy.
+  }
+
+  RtpRtcpObserver* const observer_;  // Not owned; may be null.
+  TransportType transport_type_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_RTP_RTCP_OBSERVER_H_
diff --git a/third_party/libwebrtc/test/rtp_test_utils_gn/moz.build b/third_party/libwebrtc/test/rtp_test_utils_gn/moz.build
new file mode 100644
index 0000000000..25bb2ee0a1
--- /dev/null
+++ b/third_party/libwebrtc/test/rtp_test_utils_gn/moz.build
@@ -0,0 +1,216 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("rtp_test_utils_gn")
diff --git a/third_party/libwebrtc/test/run_loop.cc b/third_party/libwebrtc/test/run_loop.cc
new file mode 100644
index 0000000000..7cc80ab481
--- /dev/null
+++ b/third_party/libwebrtc/test/run_loop.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/run_loop.h"
+
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+namespace test {
+
// Adopts the calling thread as the loop's worker thread: WrapCurrent()
// registers it as an rtc::Thread without spawning a new OS thread, so tasks
// posted to task_queue() execute right here.
RunLoop::RunLoop() {
  worker_thread_.WrapCurrent();
}

// Undoes WrapCurrent(); presumably must run on the same thread that
// constructed the loop — confirm against rtc::Thread's contract.
RunLoop::~RunLoop() {
  worker_thread_.UnwrapCurrent();
}

// Exposes the wrapped thread as a TaskQueueBase.
TaskQueueBase* RunLoop::task_queue() {
  return &worker_thread_;
}

// Processes tasks until Quit() arms a failing Wait() on the fake socket
// server, which breaks ProcessMessages() out of its loop.
void RunLoop::Run() {
  worker_thread_.ProcessMessages(WorkerThread::kForever);
}

// Makes the next socket-server Wait() call fail, ending Run().
void RunLoop::Quit() {
  socket_server_.FailNextWait();
}

// Runs all tasks that are currently ready to execute, then returns.
void RunLoop::Flush() {
  // Queue a wait failure behind the already-pending tasks so processing
  // stops once they have all run.
  worker_thread_.PostTask([this]() { socket_server_.FailNextWait(); });
  // If a test clock is used, like with GlobalSimulatedTimeController then the
  // thread will loop forever since time never increases. Since the clock is
  // simulated, 0ms can be used as the loop delay, which will process all
  // messages ready for execution.
  int cms = rtc::GetClockForTesting() ? 0 : 1000;
  worker_thread_.ProcessMessages(cms);
}

RunLoop::FakeSocketServer::FakeSocketServer() = default;
RunLoop::FakeSocketServer::~FakeSocketServer() = default;

// Arms a one-shot failure for the next Wait() call (consumed in Wait()).
void RunLoop::FakeSocketServer::FailNextWait() {
  fail_next_wait_ = true;
}
+
+bool RunLoop::FakeSocketServer::Wait(webrtc::TimeDelta max_wait_duration,
+ bool process_io) {
+ if (fail_next_wait_) {
+ fail_next_wait_ = false;
+ return false;
+ }
+ return true;
+}
+
// No-op: Wait() above never blocks, so there is never a sleeper to wake.
void RunLoop::FakeSocketServer::WakeUp() {}

// Socket creation is unsupported by this fake; callers always get nullptr.
rtc::Socket* RunLoop::FakeSocketServer::CreateSocket(int family, int type) {
  return nullptr;
}

// Wraps `ss` in an rtc::Thread and, via tq_setter_, registers this thread as
// the current TaskQueueBase so TaskQueueBase::Current() resolves to it.
RunLoop::WorkerThread::WorkerThread(rtc::SocketServer* ss)
    : rtc::Thread(ss), tq_setter_(this) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/run_loop.h b/third_party/libwebrtc/test/run_loop.h
new file mode 100644
index 0000000000..8a2bf54402
--- /dev/null
+++ b/third_party/libwebrtc/test/run_loop.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_RUN_LOOP_H_
+#define TEST_RUN_LOOP_H_
+
+#include <utility>
+
+#include "absl/functional/any_invocable.h"
+#include "api/task_queue/task_queue_base.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace test {
+
// This utility class allows you to run a TaskQueue supported interface on the
// main test thread, call Run() while doing things asynchronously and break
// the loop (from the same thread) from a callback by calling Quit().
class RunLoop {
 public:
  // Wraps the current thread; tasks posted to task_queue() run on it.
  RunLoop();
  ~RunLoop();

  // The calling thread exposed as a TaskQueueBase.
  TaskQueueBase* task_queue();

  // Processes tasks until Quit() is called from a task on this thread.
  void Run();
  // Breaks Run() out of its processing loop.
  void Quit();

  // Runs all currently-ready tasks, then returns. With a simulated clock
  // installed, pending delayed tasks whose time has not come do not run.
  void Flush();

  void PostTask(absl::AnyInvocable<void() &&> task) {
    task_queue()->PostTask(std::move(task));
  }

 private:
  // A SocketServer whose Wait() never blocks; Quit()/Flush() arm a one-shot
  // Wait() failure to make rtc::Thread::ProcessMessages() return.
  class FakeSocketServer : public rtc::SocketServer {
   public:
    FakeSocketServer();
    ~FakeSocketServer();

    void FailNextWait();

   private:
    bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override;
    void WakeUp() override;

    rtc::Socket* CreateSocket(int family, int type) override;

   private:
    bool fail_next_wait_ = false;  // One-shot flag consumed by Wait().
  };

  // rtc::Thread that also registers itself as the current TaskQueueBase.
  class WorkerThread : public rtc::Thread {
   public:
    explicit WorkerThread(rtc::SocketServer* ss);

   private:
    CurrentTaskQueueSetter tq_setter_;
  };

  FakeSocketServer socket_server_;
  WorkerThread worker_thread_{&socket_server_};
};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_RUN_LOOP_H_
diff --git a/third_party/libwebrtc/test/run_loop_unittest.cc b/third_party/libwebrtc/test/run_loop_unittest.cc
new file mode 100644
index 0000000000..80f0bcbdcc
--- /dev/null
+++ b/third_party/libwebrtc/test/run_loop_unittest.cc
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/run_loop.h"
+
+#include "api/units/time_delta.h"
+#include "rtc_base/task_queue.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
// The loop must install itself as the current TaskQueueBase for the thread
// it is constructed on.
TEST(RunLoopTest, TaskQueueOnThread) {
  test::RunLoop loop;
  EXPECT_EQ(TaskQueueBase::Current(), loop.task_queue());
  EXPECT_TRUE(loop.task_queue()->IsCurrent());
}

// Posted tasks do not run until Flush() pumps the loop.
TEST(RunLoopTest, Flush) {
  test::RunLoop loop;
  int counter = 0;
  loop.PostTask([&counter]() { ++counter; });
  EXPECT_EQ(counter, 0);
  loop.Flush();
  EXPECT_EQ(counter, 1);
}

// A delayed task must not run during Flush() (which only processes
// currently-ready tasks) but must run once Run() keeps pumping the loop.
TEST(RunLoopTest, Delayed) {
  test::RunLoop loop;
  bool ran = false;
  loop.task_queue()->PostDelayedTask(
      [&ran, &loop]() {
        ran = true;
        loop.Quit();
      },
      TimeDelta::Millis(100));
  loop.Flush();
  EXPECT_FALSE(ran);
  loop.Run();
  EXPECT_TRUE(ran);
}

// Run() must block until a posted task calls Quit().
TEST(RunLoopTest, PostAndQuit) {
  test::RunLoop loop;
  bool ran = false;
  loop.PostTask([&ran, &loop]() {
    ran = true;
    loop.Quit();
  });
  loop.Run();
  EXPECT_TRUE(ran);
}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/run_test.cc b/third_party/libwebrtc/test/run_test.cc
new file mode 100644
index 0000000000..7af47b9faf
--- /dev/null
+++ b/third_party/libwebrtc/test/run_test.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/run_test.h"
+
+namespace webrtc {
+namespace test {
+
// Invokes `test` synchronously on the calling thread. This generic
// implementation needs no helper thread; platform-specific variants may
// differ.
void RunTest(void (*test)()) {
  test();
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/run_test.h b/third_party/libwebrtc/test/run_test.h
new file mode 100644
index 0000000000..bd05584365
--- /dev/null
+++ b/third_party/libwebrtc/test/run_test.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_RUN_TEST_H_
+#define TEST_RUN_TEST_H_
+
+namespace webrtc {
+namespace test {
+
+// Runs a test function on a separate thread, if required by the OS.
+void RunTest(void (*test)());
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_RUN_TEST_H_
diff --git a/third_party/libwebrtc/test/scenario/BUILD.gn b/third_party/libwebrtc/test/scenario/BUILD.gn
new file mode 100644
index 0000000000..5da6dce87d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/BUILD.gn
@@ -0,0 +1,200 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
# Helper for printing aligned columns of per-stream statistics to a
# log writer; used by the scenario framework's stats printers.
rtc_library("column_printer") {
  testonly = true
  sources = [
    "column_printer.cc",
    "column_printer.h",
  ]
  deps = [
    "../../rtc_base:macromagic",
    "../../rtc_base:stringutils",
    "../logging:log_writer",
  ]
}
+
# Scenario targets are only built for standalone WebRTC tests; Chromium
# supplies its own test infrastructure.
if (rtc_include_tests && !build_with_chromium) {
  # YUV clips used as video content by scenario tests.
  scenario_resources = [
    "../../resources/difficult_photo_1850_1110.yuv",
    "../../resources/photo_1850_1110.yuv",
    "../../resources/presentation_1850_1110.yuv",
    "../../resources/web_screenshot_1850_1110.yuv",
  ]
  scenario_unittest_resources = [ "../../resources/foreman_cif.yuv" ]

  # On iOS, data files must be packaged into the test bundle.
  if (is_ios) {
    bundle_data("scenario_resources_bundle_data") {
      testonly = true
      sources = scenario_resources
      outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
    }
    bundle_data("scenario_unittest_resources_bundle_data") {
      testonly = true
      sources = scenario_unittest_resources
      outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
    }
  }

  # The scenario framework: simulated calls, clients, audio/video streams
  # and statistics collection.
  rtc_library("scenario") {
    testonly = true
    sources = [
      "audio_stream.cc",
      "audio_stream.h",
      "call_client.cc",
      "call_client.h",
      "hardware_codecs.cc",
      "hardware_codecs.h",
      "network_node.cc",
      "network_node.h",
      "performance_stats.cc",
      "performance_stats.h",
      "scenario.cc",
      "scenario.h",
      "scenario_config.cc",
      "scenario_config.h",
      "stats_collection.cc",
      "stats_collection.h",
      "video_frame_matcher.cc",
      "video_frame_matcher.h",
      "video_stream.cc",
      "video_stream.h",
    ]
    # NOTE(review): the original list contained "../../api:time_controller"
    # twice; the duplicate entry has been removed.
    deps = [
      ":column_printer",
      "../:fake_video_codecs",
      "../:fileutils",
      "../:test_common",
      "../:test_support",
      "../:video_test_common",
      "../../api:array_view",
      "../../api:create_frame_generator",
      "../../api:fec_controller_api",
      "../../api:frame_generator_api",
      "../../api:libjingle_peerconnection_api",
      "../../api:rtc_event_log_output_file",
      "../../api:rtp_parameters",
      "../../api:sequence_checker",
      "../../api:time_controller",
      "../../api:transport_api",
      "../../api/audio_codecs:builtin_audio_decoder_factory",
      "../../api/audio_codecs:builtin_audio_encoder_factory",
      "../../api/rtc_event_log",
      "../../api/rtc_event_log:rtc_event_log_factory",
      "../../api/task_queue",
      "../../api/test/video:function_video_factory",
      "../../api/transport:network_control",
      "../../api/units:data_rate",
      "../../api/units:data_size",
      "../../api/units:time_delta",
      "../../api/units:timestamp",
      "../../api/video:builtin_video_bitrate_allocator_factory",
      "../../api/video:video_frame",
      "../../api/video:video_rtp_headers",
      "../../api/video_codecs:scalability_mode",
      "../../api/video_codecs:video_codecs_api",
      "../../audio",
      "../../call",
      "../../call:call_interfaces",
      "../../call:rtp_sender",
      "../../call:simulated_network",
      "../../call:video_stream_api",
      "../../common_video",
      "../../media:media_constants",
      "../../media:rtc_audio_video",
      "../../media:rtc_internal_video_codecs",
      "../../media:rtc_media_base",
      "../../modules/audio_device",
      "../../modules/audio_device:audio_device_impl",
      "../../modules/audio_device:mock_audio_device",
      "../../modules/audio_mixer:audio_mixer_impl",
      "../../modules/audio_processing",
      "../../modules/congestion_controller/goog_cc:test_goog_cc_printer",
      "../../modules/rtp_rtcp",
      "../../modules/rtp_rtcp:mock_rtp_rtcp",
      "../../modules/rtp_rtcp:rtp_rtcp_format",
      "../../modules/video_coding:video_codec_interface",
      "../../modules/video_coding:video_coding_utility",
      "../../modules/video_coding:webrtc_h264",
      "../../modules/video_coding:webrtc_multiplex",
      "../../modules/video_coding:webrtc_vp8",
      "../../modules/video_coding:webrtc_vp9",
      "../../modules/video_coding/svc:scalability_mode_util",
      "../../rtc_base:checks",
      "../../rtc_base:copy_on_write_buffer",
      "../../rtc_base:net_helper",
      "../../rtc_base:refcount",
      "../../rtc_base:rtc_base_tests_utils",
      "../../rtc_base:rtc_event",
      "../../rtc_base:rtc_numerics",
      "../../rtc_base:rtc_stats_counters",
      "../../rtc_base:safe_minmax",
      "../../rtc_base:socket_address",
      "../../rtc_base:task_queue_for_test",
      "../../rtc_base:threading",
      "../../rtc_base/synchronization:mutex",
      "../../rtc_base/task_utils:repeating_task",
      "../../system_wrappers",
      "../../system_wrappers:field_trial",
      "../../video/config:streams_config",
      "../logging:log_writer",
      "../network:emulated_network",
      "../time_controller",
    ]
    absl_deps = [
      "//third_party/abseil-cpp/absl/flags:flag",
      "//third_party/abseil-cpp/absl/flags:parse",
      "//third_party/abseil-cpp/absl/functional:any_invocable",
      "//third_party/abseil-cpp/absl/memory",
      "//third_party/abseil-cpp/absl/strings",
      "//third_party/abseil-cpp/absl/types:optional",
    ]
    # Hardware codec factories are platform-specific.
    if (is_android) {
      deps += [ "../../modules/video_coding:android_codec_factory_helper" ]
    } else if (is_ios || is_mac) {
      deps += [ "../../modules/video_coding:objc_codec_factory_helper" ]
    }
    if (rtc_enable_protobuf) {
      deps += [ "../../modules/audio_coding:ana_config_proto" ]
    }
    data = scenario_resources
    if (is_ios) {
      deps += [ ":scenario_resources_bundle_data" ]
    }
  }

  # Unit tests for the framework itself.
  rtc_library("scenario_unittests") {
    testonly = true
    sources = [
      "performance_stats_unittest.cc",
      "probing_test.cc",
      "scenario_unittest.cc",
      "stats_collection_unittest.cc",
      "video_stream_unittest.cc",
    ]
    deps = [
      ":scenario",
      "../../api/test/network_emulation",
      "../../api/test/network_emulation:create_cross_traffic",
      "../../logging:mocks",
      "../../rtc_base:checks",
      "../../system_wrappers",
      "../../system_wrappers:field_trial",
      "../../test:field_trial",
      "../../test:test_support",
      "../logging:log_writer",
      "//testing/gmock",
    ]
    data = scenario_unittest_resources
    if (is_ios) {
      deps += [ ":scenario_unittest_resources_bundle_data" ]
    }
  }
}
diff --git a/third_party/libwebrtc/test/scenario/OWNERS b/third_party/libwebrtc/test/scenario/OWNERS
new file mode 100644
index 0000000000..6698afbf02
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/OWNERS
@@ -0,0 +1,2 @@
+srte@webrtc.org
+perkj@webrtc.org
diff --git a/third_party/libwebrtc/test/scenario/audio_stream.cc b/third_party/libwebrtc/test/scenario/audio_stream.cc
new file mode 100644
index 0000000000..5f2eff12ff
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/audio_stream.cc
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/audio_stream.h"
+
+#include "absl/memory/memory.h"
+#include "test/call_test.h"
+
+#if WEBRTC_ENABLE_PROTOBUF
+RTC_PUSH_IGNORING_WUNDEF()
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
+#else
+#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
+#endif
+RTC_POP_IGNORING_WUNDEF()
+#endif
+
+namespace webrtc {
+namespace test {
+namespace {
// One-based RTP header extension IDs shared by the audio send and receive
// configurations below.
enum : int { // The first valid value is 1.
  kTransportSequenceNumberExtensionId = 1,
  kAbsSendTimeExtensionId
};

// Serializes an audio_network_adaptor ControllerManager proto matching
// `config`, for use as AudioSendStream's adaptor config. Returns
// absl::nullopt (after logging an error) when protobuf support is compiled
// out.
absl::optional<std::string> CreateAdaptationString(
    AudioStreamConfig::NetworkAdaptation config) {
#if WEBRTC_ENABLE_PROTOBUF

  audio_network_adaptor::config::ControllerManager cont_conf;
  // A frame-length controller is only added when 60 ms frames are allowed
  // (i.e. a finite max rate for 60 ms frames is configured).
  if (config.frame.max_rate_for_60_ms.IsFinite()) {
    auto controller =
        cont_conf.add_controllers()->mutable_frame_length_controller();
    controller->set_fl_decreasing_packet_loss_fraction(
        config.frame.min_packet_loss_for_decrease);
    controller->set_fl_increasing_packet_loss_fraction(
        config.frame.max_packet_loss_for_increase);

    controller->set_fl_20ms_to_60ms_bandwidth_bps(
        config.frame.min_rate_for_20_ms.bps<int32_t>());
    controller->set_fl_60ms_to_20ms_bandwidth_bps(
        config.frame.max_rate_for_60_ms.bps<int32_t>());

    // 120 ms frame lengths are optional and configured the same way.
    if (config.frame.max_rate_for_120_ms.IsFinite()) {
      controller->set_fl_60ms_to_120ms_bandwidth_bps(
          config.frame.min_rate_for_60_ms.bps<int32_t>());
      controller->set_fl_120ms_to_60ms_bandwidth_bps(
          config.frame.max_rate_for_120_ms.bps<int32_t>());
    }
  }
  // The bitrate controller is always present.
  cont_conf.add_controllers()->mutable_bitrate_controller();
  std::string config_string = cont_conf.SerializeAsString();
  return config_string;
#else
  RTC_LOG(LS_ERROR) << "audio_network_adaptation is enabled"
                       " but WEBRTC_ENABLE_PROTOBUF is false.\n"
                       "Ignoring settings.";
  return absl::nullopt;
#endif // WEBRTC_ENABLE_PROTOBUF
}
+} // namespace
+
+std::vector<RtpExtension> GetAudioRtpExtensions(
+ const AudioStreamConfig& config) {
+ std::vector<RtpExtension> extensions;
+ if (config.stream.in_bandwidth_estimation) {
+ extensions.push_back({RtpExtension::kTransportSequenceNumberUri,
+ kTransportSequenceNumberExtensionId});
+ }
+ if (config.stream.abs_send_time) {
+ extensions.push_back(
+ {RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId});
+ }
+ return extensions;
+}
+
// Builds an AudioSendStream on `sender` configured for opus at 48 kHz,
// translating the scenario AudioStreamConfig into SDP parameters, bitrate
// limits and network-adaptation settings.
SendAudioStream::SendAudioStream(
    CallClient* sender,
    AudioStreamConfig config,
    rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
    Transport* send_transport)
    : sender_(sender), config_(config) {
  AudioSendStream::Config send_config(send_transport);
  ssrc_ = sender->GetNextAudioSsrc();
  send_config.rtp.ssrc = ssrc_;
  // Map config knobs onto SDP fmtp parameters understood by opus.
  SdpAudioFormat::Parameters sdp_params;
  if (config.source.channels == 2)
    sdp_params["stereo"] = "1";
  if (config.encoder.initial_frame_length != TimeDelta::Millis(20))
    sdp_params["ptime"] =
        std::to_string(config.encoder.initial_frame_length.ms());
  if (config.encoder.enable_dtx)
    sdp_params["usedtx"] = "1";

  // SdpAudioFormat::num_channels indicates that the encoder is capable of
  // stereo, but the actual channel count used is based on the "stereo"
  // parameter.
  send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
      CallTest::kAudioSendPayloadType, {"opus", 48000, 2, sdp_params});
  RTC_DCHECK_LE(config.source.channels, 2);
  send_config.encoder_factory = encoder_factory;

  // With neither min nor max rate configured, the stream runs at a fixed
  // target bitrate instead of a rate range.
  bool use_fixed_rate = !config.encoder.min_rate && !config.encoder.max_rate;
  if (use_fixed_rate)
    send_config.send_codec_spec->target_bitrate_bps =
        config.encoder.fixed_rate.bps();
  // An explicit serialized adaptor config takes precedence over one
  // generated from the scenario settings.
  if (!config.adapt.binary_proto.empty()) {
    send_config.audio_network_adaptor_config = config.adapt.binary_proto;
  } else if (config.network_adaptation) {
    send_config.audio_network_adaptor_config =
        CreateAdaptationString(config.adapt);
  }
  if (config.encoder.allocate_bitrate ||
      config.stream.in_bandwidth_estimation) {
    DataRate min_rate = DataRate::Infinity();
    DataRate max_rate = DataRate::Infinity();
    if (use_fixed_rate) {
      min_rate = config.encoder.fixed_rate;
      max_rate = config.encoder.fixed_rate;
    } else {
      min_rate = *config.encoder.min_rate;
      max_rate = *config.encoder.max_rate;
    }
    send_config.min_bitrate_bps = min_rate.bps();
    send_config.max_bitrate_bps = max_rate.bps();
  }

  if (config.stream.in_bandwidth_estimation) {
    send_config.send_codec_spec->transport_cc_enabled = true;
  }
  send_config.rtp.extensions = GetAudioRtpExtensions(config);

  // Stream creation must happen on the client's task queue.
  sender_->SendTask([&] {
    send_stream_ = sender_->call_->CreateAudioSendStream(send_config);
    sender->call_->OnAudioTransportOverheadChanged(
        sender_->transport_->packet_overhead().bytes());
  });
}
+
// Destroys the underlying stream on the client's task queue.
SendAudioStream::~SendAudioStream() {
  sender_->SendTask(
      [this] { sender_->call_->DestroyAudioSendStream(send_stream_); });
}

// Starts sending and marks the audio network as up so packets flow.
void SendAudioStream::Start() {
  sender_->SendTask([this] {
    send_stream_->Start();
    sender_->call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
  });
}

void SendAudioStream::Stop() {
  sender_->SendTask([this] { send_stream_->Stop(); });
}

// Mutes or unmutes the stream's input.
void SendAudioStream::SetMuted(bool mute) {
  sender_->SendTask([this, mute] { send_stream_->SetMuted(mute); });
}

// Stats column reporting the current audio target rate in bytes/second
// (target_bitrate_bps / 8), fetched synchronously from the send stream.
ColumnPrinter SendAudioStream::StatsPrinter() {
  return ColumnPrinter::Lambda(
      "audio_target_rate",
      [this](rtc::SimpleStringBuilder& sb) {
        sender_->SendTask([this, &sb] {
          AudioSendStream::Stats stats = send_stream_->GetStats();
          sb.AppendFormat("%.0lf", stats.target_bitrate_bps / 8.0);
        });
      },
      64);
}
+
// Builds the receive side matching `send_stream`: same remote SSRC, same
// RTP extensions and an opus decoder mapped to the shared payload type.
ReceiveAudioStream::ReceiveAudioStream(
    CallClient* receiver,
    AudioStreamConfig config,
    SendAudioStream* send_stream,
    rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
    Transport* feedback_transport)
    : receiver_(receiver), config_(config) {
  AudioReceiveStreamInterface::Config recv_config;
  recv_config.rtp.local_ssrc = receiver_->GetNextAudioLocalSsrc();
  recv_config.rtcp_send_transport = feedback_transport;
  recv_config.rtp.remote_ssrc = send_stream->ssrc_;
  // Register the remote SSRC so incoming packets demux as audio.
  receiver->ssrc_media_types_[recv_config.rtp.remote_ssrc] = MediaType::AUDIO;
  recv_config.rtp.extensions = GetAudioRtpExtensions(config);
  recv_config.decoder_factory = decoder_factory;
  // Must agree with the send codec configured in SendAudioStream.
  recv_config.decoder_map = {
      {CallTest::kAudioSendPayloadType, {"opus", 48000, 2}}};
  recv_config.sync_group = config.render.sync_group;
  receiver_->SendTask([&] {
    receive_stream_ = receiver_->call_->CreateAudioReceiveStream(recv_config);
  });
}
// Destroys the underlying stream on the receiver's task queue.
ReceiveAudioStream::~ReceiveAudioStream() {
  receiver_->SendTask(
      [&] { receiver_->call_->DestroyAudioReceiveStream(receive_stream_); });
}

// Starts reception and marks the audio network as up.
void ReceiveAudioStream::Start() {
  receiver_->SendTask([&] {
    receive_stream_->Start();
    receiver_->call_->SignalChannelNetworkState(MediaType::AUDIO, kNetworkUp);
  });
}

void ReceiveAudioStream::Stop() {
  receiver_->SendTask([&] { receive_stream_->Stop(); });
}

// Fetches receive-side stats synchronously on the receiver's task queue.
// Note: clears the legacy stats counters as a side effect.
AudioReceiveStreamInterface::Stats ReceiveAudioStream::GetStats() const {
  AudioReceiveStreamInterface::Stats result;
  receiver_->SendTask([&] {
    result = receive_stream_->GetStats(/*get_and_clear_legacy_stats=*/true);
  });
  return result;
}
+
AudioStreamPair::~AudioStreamPair() = default;

// Wires a send stream on `sender` to a receive stream on `receiver`, each
// using its own client's transport. The receive stream is constructed after
// (and from) the send stream so it can mirror its SSRC.
AudioStreamPair::AudioStreamPair(
    CallClient* sender,
    rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
    CallClient* receiver,
    rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
    AudioStreamConfig config)
    : config_(config),
      send_stream_(sender, config, encoder_factory, sender->transport_.get()),
      receive_stream_(receiver,
                      config,
                      &send_stream_,
                      decoder_factory,
                      receiver->transport_.get()) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/audio_stream.h b/third_party/libwebrtc/test/scenario/audio_stream.h
new file mode 100644
index 0000000000..cbaf9d29eb
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/audio_stream.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_AUDIO_STREAM_H_
+#define TEST_SCENARIO_AUDIO_STREAM_H_
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
// SendAudioStream represents sending of audio. It can be used for starting
// the stream if necessary.
class SendAudioStream {
 public:
  ~SendAudioStream();

  SendAudioStream(const SendAudioStream&) = delete;
  SendAudioStream& operator=(const SendAudioStream&) = delete;

  void Start();
  void Stop();
  // Mutes or unmutes the stream's input.
  void SetMuted(bool mute);
  // Stats column reporting the target send rate in bytes per second.
  ColumnPrinter StatsPrinter();

 private:
  friend class Scenario;
  friend class AudioStreamPair;
  friend class ReceiveAudioStream;
  // Construction is restricted to the friend classes above.
  SendAudioStream(CallClient* sender,
                  AudioStreamConfig config,
                  rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
                  Transport* send_transport);
  AudioSendStream* send_stream_ = nullptr;  // Owned by sender_->call_.
  CallClient* const sender_;
  const AudioStreamConfig config_;
  uint32_t ssrc_;  // Send SSRC, mirrored by the paired receive stream.
};
+
// ReceiveAudioStream represents an audio receiver. It can't be constructed
// directly; instances are created by Scenario/AudioStreamPair.
class ReceiveAudioStream {
 public:
  ~ReceiveAudioStream();

  ReceiveAudioStream(const ReceiveAudioStream&) = delete;
  ReceiveAudioStream& operator=(const ReceiveAudioStream&) = delete;

  void Start();
  void Stop();
  // Synchronously fetches receive-side stats (clears legacy counters).
  AudioReceiveStreamInterface::Stats GetStats() const;

 private:
  friend class Scenario;
  friend class AudioStreamPair;
  ReceiveAudioStream(CallClient* receiver,
                     AudioStreamConfig config,
                     SendAudioStream* send_stream,
                     rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
                     Transport* feedback_transport);
  AudioReceiveStreamInterface* receive_stream_ = nullptr;  // Owned by call_.
  CallClient* const receiver_;
  const AudioStreamConfig config_;
};
+
// AudioStreamPair represents an audio streaming session. It can be used to
// access underlying send and receive classes. It can also be used in calls
// to the Scenario class.
class AudioStreamPair {
 public:
  ~AudioStreamPair();

  AudioStreamPair(const AudioStreamPair&) = delete;
  AudioStreamPair& operator=(const AudioStreamPair&) = delete;

  SendAudioStream* send() { return &send_stream_; }
  ReceiveAudioStream* receive() { return &receive_stream_; }

 private:
  friend class Scenario;
  AudioStreamPair(CallClient* sender,
                  rtc::scoped_refptr<AudioEncoderFactory> encoder_factory,
                  CallClient* receiver,
                  rtc::scoped_refptr<AudioDecoderFactory> decoder_factory,
                  AudioStreamConfig config);

 private:
  const AudioStreamConfig config_;
  SendAudioStream send_stream_;
  // Declared after send_stream_: its constructor takes a pointer to it.
  ReceiveAudioStream receive_stream_;
};

// Returns the RTP header extensions (transport-wide sequence number,
// abs-send-time) enabled by `config`.
std::vector<RtpExtension> GetAudioRtpExtensions(
    const AudioStreamConfig& config);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_AUDIO_STREAM_H_
diff --git a/third_party/libwebrtc/test/scenario/call_client.cc b/third_party/libwebrtc/test/scenario/call_client.cc
new file mode 100644
index 0000000000..c80f58eeeb
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/call_client.cc
@@ -0,0 +1,386 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/call_client.h"
+
+#include <iostream>
+#include <memory>
+#include <utility>
+
+#include "api/media_types.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/transport/network_types.h"
+#include "call/call.h"
+#include "call/rtp_transport_controller_send_factory.h"
+#include "modules/audio_mixer/audio_mixer_impl.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_util.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+static constexpr size_t kNumSsrcs = 6;
+const uint32_t kSendRtxSsrcs[kNumSsrcs] = {0xBADCAFD, 0xBADCAFE, 0xBADCAFF,
+ 0xBADCB00, 0xBADCB01, 0xBADCB02};
+const uint32_t kVideoSendSsrcs[kNumSsrcs] = {0xC0FFED, 0xC0FFEE, 0xC0FFEF,
+ 0xC0FFF0, 0xC0FFF1, 0xC0FFF2};
+const uint32_t kVideoRecvLocalSsrcs[kNumSsrcs] = {0xDAB001, 0xDAB002, 0xDAB003,
+ 0xDAB004, 0xDAB005, 0xDAB006};
+const uint32_t kAudioSendSsrc = 0xDEADBEEF;
+const uint32_t kReceiverLocalAudioSsrc = 0x1234567;
+
+constexpr int kEventLogOutputIntervalMs = 5000;
+
+CallClientFakeAudio InitAudio(TimeController* time_controller) {
+ CallClientFakeAudio setup;
+ auto capturer = TestAudioDeviceModule::CreatePulsedNoiseCapturer(256, 48000);
+ auto renderer = TestAudioDeviceModule::CreateDiscardRenderer(48000);
+ setup.fake_audio_device = TestAudioDeviceModule::Create(
+ time_controller->GetTaskQueueFactory(), std::move(capturer),
+ std::move(renderer), 1.f);
+ setup.apm = AudioProcessingBuilder().Create();
+ setup.fake_audio_device->Init();
+ AudioState::Config audio_state_config;
+ audio_state_config.audio_mixer = AudioMixerImpl::Create();
+ audio_state_config.audio_processing = setup.apm;
+ audio_state_config.audio_device_module = setup.fake_audio_device;
+ setup.audio_state = AudioState::Create(audio_state_config);
+ setup.fake_audio_device->RegisterAudioCallback(
+ setup.audio_state->audio_transport());
+ return setup;
+}
+
+Call* CreateCall(TimeController* time_controller,
+ RtcEventLog* event_log,
+ CallClientConfig config,
+ LoggingNetworkControllerFactory* network_controller_factory,
+ rtc::scoped_refptr<AudioState> audio_state) {
+ CallConfig call_config(event_log);
+ call_config.bitrate_config.max_bitrate_bps =
+ config.transport.rates.max_rate.bps_or(-1);
+ call_config.bitrate_config.min_bitrate_bps =
+ config.transport.rates.min_rate.bps();
+ call_config.bitrate_config.start_bitrate_bps =
+ config.transport.rates.start_rate.bps();
+ call_config.task_queue_factory = time_controller->GetTaskQueueFactory();
+ call_config.network_controller_factory = network_controller_factory;
+ call_config.audio_state = audio_state;
+ call_config.pacer_burst_interval = config.pacer_burst_interval;
+ call_config.trials = config.field_trials;
+ Clock* clock = time_controller->GetClock();
+ return Call::Create(call_config, clock,
+ RtpTransportControllerSendFactory().Create(
+ call_config.ExtractTransportConfig(), clock));
+}
+
+std::unique_ptr<RtcEventLog> CreateEventLog(
+ TaskQueueFactory* task_queue_factory,
+ LogWriterFactoryInterface* log_writer_factory) {
+ if (!log_writer_factory) {
+ return std::make_unique<RtcEventLogNull>();
+ }
+ auto event_log = RtcEventLogFactory(task_queue_factory)
+ .CreateRtcEventLog(RtcEventLog::EncodingType::NewFormat);
+ bool success = event_log->StartLogging(log_writer_factory->Create(".rtc.dat"),
+ kEventLogOutputIntervalMs);
+ RTC_CHECK(success);
+ return event_log;
+}
+} // namespace
+NetworkControleUpdateCache::NetworkControleUpdateCache(
+ std::unique_ptr<NetworkControllerInterface> controller)
+ : controller_(std::move(controller)) {}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkAvailability(
+ NetworkAvailability msg) {
+ return Update(controller_->OnNetworkAvailability(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkRouteChange(
+ NetworkRouteChange msg) {
+ return Update(controller_->OnNetworkRouteChange(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnProcessInterval(
+ ProcessInterval msg) {
+ return Update(controller_->OnProcessInterval(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnRemoteBitrateReport(
+ RemoteBitrateReport msg) {
+ return Update(controller_->OnRemoteBitrateReport(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnRoundTripTimeUpdate(
+ RoundTripTimeUpdate msg) {
+ return Update(controller_->OnRoundTripTimeUpdate(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnSentPacket(SentPacket msg) {
+ return Update(controller_->OnSentPacket(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnReceivedPacket(
+ ReceivedPacket msg) {
+ return Update(controller_->OnReceivedPacket(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnStreamsConfig(
+ StreamsConfig msg) {
+ return Update(controller_->OnStreamsConfig(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTargetRateConstraints(
+ TargetRateConstraints msg) {
+ return Update(controller_->OnTargetRateConstraints(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTransportLossReport(
+ TransportLossReport msg) {
+ return Update(controller_->OnTransportLossReport(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnTransportPacketsFeedback(
+ TransportPacketsFeedback msg) {
+ return Update(controller_->OnTransportPacketsFeedback(msg));
+}
+NetworkControlUpdate NetworkControleUpdateCache::OnNetworkStateEstimate(
+ NetworkStateEstimate msg) {
+ return Update(controller_->OnNetworkStateEstimate(msg));
+}
+
+NetworkControlUpdate NetworkControleUpdateCache::update_state() const {
+ return update_state_;
+}
+NetworkControlUpdate NetworkControleUpdateCache::Update(
+ NetworkControlUpdate update) {
+ if (update.target_rate)
+ update_state_.target_rate = update.target_rate;
+ if (update.pacer_config)
+ update_state_.pacer_config = update.pacer_config;
+ if (update.congestion_window)
+ update_state_.congestion_window = update.congestion_window;
+ if (!update.probe_cluster_configs.empty())
+ update_state_.probe_cluster_configs = update.probe_cluster_configs;
+ return update;
+}
+
+LoggingNetworkControllerFactory::LoggingNetworkControllerFactory(
+ LogWriterFactoryInterface* log_writer_factory,
+ TransportControllerConfig config) {
+ if (config.cc_factory) {
+ cc_factory_ = config.cc_factory;
+ if (log_writer_factory)
+ RTC_LOG(LS_WARNING)
+ << "Can't log controller state for injected network controllers";
+ } else {
+ if (log_writer_factory) {
+ goog_cc_factory_.AttachWriter(
+ log_writer_factory->Create(".cc_state.txt"));
+ print_cc_state_ = true;
+ }
+ cc_factory_ = &goog_cc_factory_;
+ }
+}
+
+LoggingNetworkControllerFactory::~LoggingNetworkControllerFactory() {}
+
+void LoggingNetworkControllerFactory::LogCongestionControllerStats(
+ Timestamp at_time) {
+ if (print_cc_state_)
+ goog_cc_factory_.PrintState(at_time);
+}
+
+NetworkControlUpdate LoggingNetworkControllerFactory::GetUpdate() const {
+ if (last_controller_)
+ return last_controller_->update_state();
+ return NetworkControlUpdate();
+}
+
+std::unique_ptr<NetworkControllerInterface>
+LoggingNetworkControllerFactory::Create(NetworkControllerConfig config) {
+ auto controller =
+ std::make_unique<NetworkControleUpdateCache>(cc_factory_->Create(config));
+ last_controller_ = controller.get();
+ return controller;
+}
+
+TimeDelta LoggingNetworkControllerFactory::GetProcessInterval() const {
+ return cc_factory_->GetProcessInterval();
+}
+
+void LoggingNetworkControllerFactory::SetRemoteBitrateEstimate(
+ RemoteBitrateReport msg) {
+ if (last_controller_)
+ last_controller_->OnRemoteBitrateReport(msg);
+}
+
+CallClient::CallClient(
+ TimeController* time_controller,
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ CallClientConfig config)
+ : time_controller_(time_controller),
+ clock_(time_controller->GetClock()),
+ log_writer_factory_(std::move(log_writer_factory)),
+ network_controller_factory_(log_writer_factory_.get(), config.transport),
+ task_queue_(time_controller->GetTaskQueueFactory()->CreateTaskQueue(
+ "CallClient",
+ TaskQueueFactory::Priority::NORMAL)) {
+ config.field_trials = &field_trials_;
+ SendTask([this, config] {
+ event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(),
+ log_writer_factory_.get());
+ fake_audio_setup_ = InitAudio(time_controller_);
+
+ call_.reset(CreateCall(time_controller_, event_log_.get(), config,
+ &network_controller_factory_,
+ fake_audio_setup_.audio_state));
+ transport_ = std::make_unique<NetworkNodeTransport>(clock_, call_.get());
+ });
+}
+
+CallClient::~CallClient() {
+ SendTask([&] {
+ call_.reset();
+ fake_audio_setup_ = {};
+ rtc::Event done;
+ event_log_->StopLogging([&done] { done.Set(); });
+ done.Wait(rtc::Event::kForever);
+ event_log_.reset();
+ });
+}
+
+ColumnPrinter CallClient::StatsPrinter() {
+ return ColumnPrinter::Lambda(
+ "pacer_delay call_send_bw",
+ [this](rtc::SimpleStringBuilder& sb) {
+ Call::Stats call_stats = call_->GetStats();
+ sb.AppendFormat("%.3lf %.0lf", call_stats.pacer_delay_ms / 1000.0,
+ call_stats.send_bandwidth_bps / 8.0);
+ },
+ 64);
+}
+
+Call::Stats CallClient::GetStats() {
+ // This call needs to be made on the thread that `call_` was constructed on.
+ Call::Stats stats;
+ SendTask([this, &stats] { stats = call_->GetStats(); });
+ return stats;
+}
+
+DataRate CallClient::target_rate() const {
+ return network_controller_factory_.GetUpdate().target_rate->target_rate;
+}
+
+DataRate CallClient::stable_target_rate() const {
+ return network_controller_factory_.GetUpdate()
+ .target_rate->stable_target_rate;
+}
+
+DataRate CallClient::padding_rate() const {
+ return network_controller_factory_.GetUpdate().pacer_config->pad_rate();
+}
+
+void CallClient::SetRemoteBitrate(DataRate bitrate) {
+ RemoteBitrateReport msg;
+ msg.bandwidth = bitrate;
+ msg.receive_time = clock_->CurrentTime();
+ network_controller_factory_.SetRemoteBitrateEstimate(msg);
+}
+
+void CallClient::UpdateBitrateConstraints(
+ const BitrateConstraints& constraints) {
+ SendTask([this, &constraints]() {
+ call_->GetTransportControllerSend()->SetSdpBitrateParameters(constraints);
+ });
+}
+
+void CallClient::SetAudioReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions) {
+ SendTask([this, &extensions]() {
+ audio_extensions_ = RtpHeaderExtensionMap(extensions);
+ });
+}
+
+void CallClient::SetVideoReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions) {
+ SendTask([this, &extensions]() {
+ video_extensions_ = RtpHeaderExtensionMap(extensions);
+ });
+}
+
+void CallClient::OnPacketReceived(EmulatedIpPacket packet) {
+ MediaType media_type = MediaType::ANY;
+ if (IsRtpPacket(packet.data)) {
+ media_type = ssrc_media_types_[ParseRtpSsrc(packet.data)];
+ task_queue_.PostTask([this, media_type,
+ packet = std::move(packet)]() mutable {
+ RtpHeaderExtensionMap& extension_map = media_type == MediaType::AUDIO
+ ? audio_extensions_
+ : video_extensions_;
+ RtpPacketReceived received_packet(&extension_map, packet.arrival_time);
+ RTC_CHECK(received_packet.Parse(packet.data));
+ call_->Receiver()->DeliverRtpPacket(media_type, received_packet,
+ /*undemuxable_packet_handler=*/
+ [](const RtpPacketReceived& packet) {
+ RTC_CHECK_NOTREACHED();
+ return false;
+ });
+ });
+ } else {
+ task_queue_.PostTask(
+ [call = call_.get(), packet = std::move(packet)]() mutable {
+ call->Receiver()->DeliverRtcpPacket(packet.data);
+ });
+ }
+}
+
+std::unique_ptr<RtcEventLogOutput> CallClient::GetLogWriter(std::string name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return log_writer_factory_->Create(name);
+}
+
+uint32_t CallClient::GetNextVideoSsrc() {
+ RTC_CHECK_LT(next_video_ssrc_index_, kNumSsrcs);
+ return kVideoSendSsrcs[next_video_ssrc_index_++];
+}
+
+uint32_t CallClient::GetNextVideoLocalSsrc() {
+ RTC_CHECK_LT(next_video_local_ssrc_index_, kNumSsrcs);
+ return kVideoRecvLocalSsrcs[next_video_local_ssrc_index_++];
+}
+
+uint32_t CallClient::GetNextAudioSsrc() {
+ RTC_CHECK_LT(next_audio_ssrc_index_, 1);
+ next_audio_ssrc_index_++;
+ return kAudioSendSsrc;
+}
+
+uint32_t CallClient::GetNextAudioLocalSsrc() {
+ RTC_CHECK_LT(next_audio_local_ssrc_index_, 1);
+ next_audio_local_ssrc_index_++;
+ return kReceiverLocalAudioSsrc;
+}
+
+uint32_t CallClient::GetNextRtxSsrc() {
+ RTC_CHECK_LT(next_rtx_ssrc_index_, kNumSsrcs);
+ return kSendRtxSsrcs[next_rtx_ssrc_index_++];
+}
+
+void CallClient::SendTask(std::function<void()> task) {
+ task_queue_.SendTask(std::move(task));
+}
+
+int16_t CallClient::Bind(EmulatedEndpoint* endpoint) {
+ uint16_t port = endpoint->BindReceiver(0, this).value();
+ endpoints_.push_back({endpoint, port});
+ return port;
+}
+
+void CallClient::UnBind() {
+ for (auto ep_port : endpoints_)
+ ep_port.first->UnbindReceiver(ep_port.second);
+}
+
+CallClientPair::~CallClientPair() = default;
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/call_client.h b/third_party/libwebrtc/test/scenario/call_client.h
new file mode 100644
index 0000000000..5d62fc75e7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/call_client.h
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_CALL_CLIENT_H_
+#define TEST_SCENARIO_CALL_CLIENT_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtp_parameters.h"
+#include "api/test/time_controller.h"
+#include "api/units/data_rate.h"
+#include "call/call.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h"
+#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "test/logging/log_writer.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+
+namespace test {
+// Helper class to capture network controller state.
+class NetworkControleUpdateCache : public NetworkControllerInterface {
+ public:
+ explicit NetworkControleUpdateCache(
+ std::unique_ptr<NetworkControllerInterface> controller);
+
+ NetworkControlUpdate OnNetworkAvailability(NetworkAvailability msg) override;
+ NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange msg) override;
+ NetworkControlUpdate OnProcessInterval(ProcessInterval msg) override;
+ NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport msg) override;
+ NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate msg) override;
+ NetworkControlUpdate OnSentPacket(SentPacket msg) override;
+ NetworkControlUpdate OnReceivedPacket(ReceivedPacket msg) override;
+ NetworkControlUpdate OnStreamsConfig(StreamsConfig msg) override;
+ NetworkControlUpdate OnTargetRateConstraints(
+ TargetRateConstraints msg) override;
+ NetworkControlUpdate OnTransportLossReport(TransportLossReport msg) override;
+ NetworkControlUpdate OnTransportPacketsFeedback(
+ TransportPacketsFeedback msg) override;
+ NetworkControlUpdate OnNetworkStateEstimate(
+ NetworkStateEstimate msg) override;
+
+ NetworkControlUpdate update_state() const;
+
+ private:
+ NetworkControlUpdate Update(NetworkControlUpdate update);
+ const std::unique_ptr<NetworkControllerInterface> controller_;
+ NetworkControlUpdate update_state_;
+};
+
+class LoggingNetworkControllerFactory
+ : public NetworkControllerFactoryInterface {
+ public:
+ LoggingNetworkControllerFactory(LogWriterFactoryInterface* log_writer_factory,
+ TransportControllerConfig config);
+
+ ~LoggingNetworkControllerFactory();
+
+ LoggingNetworkControllerFactory(const LoggingNetworkControllerFactory&) =
+ delete;
+ LoggingNetworkControllerFactory& operator=(
+ const LoggingNetworkControllerFactory&) = delete;
+
+ std::unique_ptr<NetworkControllerInterface> Create(
+ NetworkControllerConfig config) override;
+ TimeDelta GetProcessInterval() const override;
+  // TODO(srte): Consider using the ColumnPrinter interface for this.
+ void LogCongestionControllerStats(Timestamp at_time);
+ void SetRemoteBitrateEstimate(RemoteBitrateReport msg);
+
+ NetworkControlUpdate GetUpdate() const;
+
+ private:
+ GoogCcDebugFactory goog_cc_factory_;
+ NetworkControllerFactoryInterface* cc_factory_ = nullptr;
+ bool print_cc_state_ = false;
+ NetworkControleUpdateCache* last_controller_ = nullptr;
+};
+
+struct CallClientFakeAudio {
+ rtc::scoped_refptr<AudioProcessing> apm;
+ rtc::scoped_refptr<TestAudioDeviceModule> fake_audio_device;
+ rtc::scoped_refptr<AudioState> audio_state;
+};
+// CallClient represents a participant in a call scenario. It is created by the
+// Scenario class and is used as sender and receiver when setting up a media
+// stream session.
+class CallClient : public EmulatedNetworkReceiverInterface {
+ public:
+ CallClient(TimeController* time_controller,
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ CallClientConfig config);
+
+ ~CallClient();
+
+ CallClient(const CallClient&) = delete;
+ CallClient& operator=(const CallClient&) = delete;
+
+ ColumnPrinter StatsPrinter();
+ Call::Stats GetStats();
+ DataRate send_bandwidth() {
+ return DataRate::BitsPerSec(GetStats().send_bandwidth_bps);
+ }
+ DataRate target_rate() const;
+ DataRate stable_target_rate() const;
+ DataRate padding_rate() const;
+ void UpdateBitrateConstraints(const BitrateConstraints& constraints);
+ void SetRemoteBitrate(DataRate bitrate);
+
+ void SetAudioReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions);
+ void SetVideoReceiveRtpHeaderExtensions(
+ rtc::ArrayView<RtpExtension> extensions);
+
+ void OnPacketReceived(EmulatedIpPacket packet) override;
+ std::unique_ptr<RtcEventLogOutput> GetLogWriter(std::string name);
+
+ // Exposed publicly so that tests can execute tasks such as querying stats
+ // for media streams in the expected runtime environment (essentially what
+ // CallClient does internally for GetStats()).
+ void SendTask(std::function<void()> task);
+
+ private:
+ friend class Scenario;
+ friend class CallClientPair;
+ friend class SendVideoStream;
+ friend class VideoStreamPair;
+ friend class ReceiveVideoStream;
+ friend class SendAudioStream;
+ friend class ReceiveAudioStream;
+ friend class AudioStreamPair;
+ friend class NetworkNodeTransport;
+ uint32_t GetNextVideoSsrc();
+ uint32_t GetNextVideoLocalSsrc();
+ uint32_t GetNextAudioSsrc();
+ uint32_t GetNextAudioLocalSsrc();
+ uint32_t GetNextRtxSsrc();
+ int16_t Bind(EmulatedEndpoint* endpoint);
+ void UnBind();
+
+ TimeController* const time_controller_;
+ Clock* clock_;
+ const std::unique_ptr<LogWriterFactoryInterface> log_writer_factory_;
+ std::unique_ptr<RtcEventLog> event_log_;
+ LoggingNetworkControllerFactory network_controller_factory_;
+ CallClientFakeAudio fake_audio_setup_;
+ std::unique_ptr<Call> call_;
+ std::unique_ptr<NetworkNodeTransport> transport_;
+ std::vector<std::pair<EmulatedEndpoint*, uint16_t>> endpoints_;
+ RtpHeaderExtensionMap audio_extensions_;
+ RtpHeaderExtensionMap video_extensions_;
+
+ int next_video_ssrc_index_ = 0;
+ int next_video_local_ssrc_index_ = 0;
+ int next_rtx_ssrc_index_ = 0;
+ int next_audio_ssrc_index_ = 0;
+ int next_audio_local_ssrc_index_ = 0;
+ std::map<uint32_t, MediaType> ssrc_media_types_;
+ // Defined last so it's destroyed first.
+ TaskQueueForTest task_queue_;
+
+ const FieldTrialBasedConfig field_trials_;
+};
+
+class CallClientPair {
+ public:
+ ~CallClientPair();
+
+ CallClientPair(const CallClientPair&) = delete;
+ CallClientPair& operator=(const CallClientPair&) = delete;
+
+ CallClient* first() { return first_; }
+ CallClient* second() { return second_; }
+ std::pair<CallClient*, CallClient*> forward() { return {first(), second()}; }
+ std::pair<CallClient*, CallClient*> reverse() { return {second(), first()}; }
+
+ private:
+ friend class Scenario;
+ CallClientPair(CallClient* first, CallClient* second)
+ : first_(first), second_(second) {}
+ CallClient* const first_;
+ CallClient* const second_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_CALL_CLIENT_H_
diff --git a/third_party/libwebrtc/test/scenario/column_printer.cc b/third_party/libwebrtc/test/scenario/column_printer.cc
new file mode 100644
index 0000000000..661c83bd0d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/column_printer.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/column_printer.h"
+
+namespace webrtc {
+namespace test {
+
+ColumnPrinter::ColumnPrinter(const ColumnPrinter&) = default;
+ColumnPrinter::~ColumnPrinter() = default;
+
+ColumnPrinter::ColumnPrinter(
+ const char* headers,
+ std::function<void(rtc::SimpleStringBuilder&)> printer,
+ size_t max_length)
+ : headers_(headers), printer_(printer), max_length_(max_length) {}
+
+ColumnPrinter ColumnPrinter::Fixed(const char* headers, std::string fields) {
+ return ColumnPrinter(
+ headers, [fields](rtc::SimpleStringBuilder& sb) { sb << fields; },
+ fields.size());
+}
+
+ColumnPrinter ColumnPrinter::Lambda(
+ const char* headers,
+ std::function<void(rtc::SimpleStringBuilder&)> printer,
+ size_t max_length) {
+ return ColumnPrinter(headers, printer, max_length);
+}
+
+StatesPrinter::StatesPrinter(std::unique_ptr<RtcEventLogOutput> writer,
+ std::vector<ColumnPrinter> printers)
+ : writer_(std::move(writer)), printers_(printers) {
+ RTC_CHECK(!printers_.empty());
+ for (auto& printer : printers_)
+ buffer_size_ += printer.max_length_ + 1;
+ buffer_.resize(buffer_size_);
+}
+
+StatesPrinter::~StatesPrinter() = default;
+
+void StatesPrinter::PrintHeaders() {
+ if (!writer_)
+ return;
+ writer_->Write(printers_[0].headers_);
+ for (size_t i = 1; i < printers_.size(); ++i) {
+ writer_->Write(" ");
+ writer_->Write(printers_[i].headers_);
+ }
+ writer_->Write("\n");
+}
+
+void StatesPrinter::PrintRow() {
+  // Note that this is run even for null output to preserve side effects; this
+  // allows setting break points etc.
+ rtc::SimpleStringBuilder sb(buffer_);
+ printers_[0].printer_(sb);
+ for (size_t i = 1; i < printers_.size(); ++i) {
+ sb << ' ';
+ printers_[i].printer_(sb);
+ }
+ sb << "\n";
+ if (writer_)
+ writer_->Write(std::string(sb.str(), sb.size()));
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/column_printer.h b/third_party/libwebrtc/test/scenario/column_printer.h
new file mode 100644
index 0000000000..529f4597ec
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/column_printer.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_COLUMN_PRINTER_H_
+#define TEST_SCENARIO_COLUMN_PRINTER_H_
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "rtc_base/strings/string_builder.h"
+#include "test/logging/log_writer.h"
+
+namespace webrtc {
+namespace test {
+class ColumnPrinter {
+ public:
+ ColumnPrinter(const ColumnPrinter&);
+ ~ColumnPrinter();
+ static ColumnPrinter Fixed(const char* headers, std::string fields);
+ static ColumnPrinter Lambda(
+ const char* headers,
+ std::function<void(rtc::SimpleStringBuilder&)> printer,
+ size_t max_length = 256);
+
+ protected:
+ friend class StatesPrinter;
+ const char* headers_;
+ std::function<void(rtc::SimpleStringBuilder&)> printer_;
+ size_t max_length_;
+
+ private:
+ ColumnPrinter(const char* headers,
+ std::function<void(rtc::SimpleStringBuilder&)> printer,
+ size_t max_length);
+};
+
+class StatesPrinter {
+ public:
+ StatesPrinter(std::unique_ptr<RtcEventLogOutput> writer,
+ std::vector<ColumnPrinter> printers);
+
+ ~StatesPrinter();
+
+ StatesPrinter(const StatesPrinter&) = delete;
+ StatesPrinter& operator=(const StatesPrinter&) = delete;
+
+ void PrintHeaders();
+ void PrintRow();
+
+ private:
+ const std::unique_ptr<RtcEventLogOutput> writer_;
+ const std::vector<ColumnPrinter> printers_;
+ size_t buffer_size_ = 0;
+ std::vector<char> buffer_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_COLUMN_PRINTER_H_
diff --git a/third_party/libwebrtc/test/scenario/hardware_codecs.cc b/third_party/libwebrtc/test/scenario/hardware_codecs.cc
new file mode 100644
index 0000000000..cac0f10dc9
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/hardware_codecs.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/hardware_codecs.h"
+
+#include "rtc_base/checks.h"
+
+#ifdef WEBRTC_ANDROID
+#include "modules/video_coding/codecs/test/android_codec_factory_helper.h"
+#endif
+#ifdef WEBRTC_MAC
+#include "modules/video_coding/codecs/test/objc_codec_factory_helper.h"
+#endif
+
+namespace webrtc {
+namespace test {
+std::unique_ptr<VideoEncoderFactory> CreateHardwareEncoderFactory() {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+ return CreateAndroidEncoderFactory();
+#else
+#ifdef WEBRTC_MAC
+ return CreateObjCEncoderFactory();
+#else
+ RTC_DCHECK_NOTREACHED()
+ << "Hardware encoder not implemented on this platform.";
+ return nullptr;
+#endif
+#endif
+}
+std::unique_ptr<VideoDecoderFactory> CreateHardwareDecoderFactory() {
+#ifdef WEBRTC_ANDROID
+ InitializeAndroidObjects();
+ return CreateAndroidDecoderFactory();
+#else
+#ifdef WEBRTC_MAC
+ return CreateObjCDecoderFactory();
+#else
+ RTC_DCHECK_NOTREACHED()
+ << "Hardware decoder not implemented on this platform.";
+ return nullptr;
+#endif
+#endif
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/hardware_codecs.h b/third_party/libwebrtc/test/scenario/hardware_codecs.h
new file mode 100644
index 0000000000..ae14a27d9e
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/hardware_codecs.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_HARDWARE_CODECS_H_
+#define TEST_SCENARIO_HARDWARE_CODECS_H_
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+namespace test {
+std::unique_ptr<VideoEncoderFactory> CreateHardwareEncoderFactory();
+std::unique_ptr<VideoDecoderFactory> CreateHardwareDecoderFactory();
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_HARDWARE_CODECS_H_
diff --git a/third_party/libwebrtc/test/scenario/network_node.cc b/third_party/libwebrtc/test/scenario/network_node.cc
new file mode 100644
index 0000000000..e149bb11e0
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/network_node.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/network_node.h"
+
+#include <algorithm>
+#include <vector>
+
+#include <memory>
+#include "rtc_base/net_helper.h"
+#include "rtc_base/numerics/safe_minmax.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr char kDummyTransportName[] = "dummy";
+SimulatedNetwork::Config CreateSimulationConfig(
+ NetworkSimulationConfig config) {
+ SimulatedNetwork::Config sim_config;
+ sim_config.link_capacity_kbps = config.bandwidth.kbps_or(0);
+ sim_config.loss_percent = config.loss_rate * 100;
+ sim_config.queue_delay_ms = config.delay.ms();
+ sim_config.delay_standard_deviation_ms = config.delay_std_dev.ms();
+ sim_config.packet_overhead = config.packet_overhead.bytes<int>();
+ sim_config.queue_length_packets =
+ config.packet_queue_length_limit.value_or(0);
+ return sim_config;
+}
+} // namespace
+
+SimulationNode::SimulationNode(NetworkSimulationConfig config,
+ SimulatedNetwork* behavior,
+ EmulatedNetworkNode* network_node)
+ : config_(config), simulation_(behavior), network_node_(network_node) {}
+
+std::unique_ptr<SimulatedNetwork> SimulationNode::CreateBehavior(
+ NetworkSimulationConfig config) {
+ SimulatedNetwork::Config sim_config = CreateSimulationConfig(config);
+ return std::make_unique<SimulatedNetwork>(sim_config);
+}
+
+void SimulationNode::UpdateConfig(
+ std::function<void(NetworkSimulationConfig*)> modifier) {
+ modifier(&config_);
+ SimulatedNetwork::Config sim_config = CreateSimulationConfig(config_);
+ simulation_->SetConfig(sim_config);
+}
+
+void SimulationNode::PauseTransmissionUntil(Timestamp until) {
+ simulation_->PauseTransmissionUntil(until.us());
+}
+
+ColumnPrinter SimulationNode::ConfigPrinter() const {
+ return ColumnPrinter::Lambda(
+ "propagation_delay capacity loss_rate",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sb.AppendFormat("%.3lf %.0lf %.2lf", config_.delay.seconds<double>(),
+ config_.bandwidth.bps() / 8.0, config_.loss_rate);
+ });
+}
+
+NetworkNodeTransport::NetworkNodeTransport(Clock* sender_clock,
+ Call* sender_call)
+ : sender_clock_(sender_clock), sender_call_(sender_call) {}
+
+NetworkNodeTransport::~NetworkNodeTransport() = default;
+
+bool NetworkNodeTransport::SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) {
+ int64_t send_time_ms = sender_clock_->TimeInMilliseconds();
+ rtc::SentPacket sent_packet;
+ sent_packet.packet_id = options.packet_id;
+ sent_packet.info.included_in_feedback = options.included_in_feedback;
+ sent_packet.info.included_in_allocation = options.included_in_allocation;
+ sent_packet.send_time_ms = send_time_ms;
+ sent_packet.info.packet_size_bytes = length;
+ sent_packet.info.packet_type = rtc::PacketType::kData;
+ sender_call_->OnSentPacket(sent_packet);
+
+ MutexLock lock(&mutex_);
+ if (!endpoint_)
+ return false;
+ rtc::CopyOnWriteBuffer buffer(packet, length);
+ endpoint_->SendPacket(local_address_, remote_address_, buffer,
+ packet_overhead_.bytes());
+ return true;
+}
+
+bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) {
+ rtc::CopyOnWriteBuffer buffer(packet, length);
+ MutexLock lock(&mutex_);
+ if (!endpoint_)
+ return false;
+ endpoint_->SendPacket(local_address_, remote_address_, buffer,
+ packet_overhead_.bytes());
+ return true;
+}
+
+void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint,
+ const rtc::SocketAddress& receiver_address,
+ DataSize packet_overhead) {
+ rtc::NetworkRoute route;
+ route.connected = true;
+ // We assume that the address will be unique in the lower bytes.
+ route.local = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>(
+ receiver_address.ipaddr().v4AddressAsHostOrderInteger()));
+ route.remote = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>(
+ receiver_address.ipaddr().v4AddressAsHostOrderInteger()));
+ route.packet_overhead = packet_overhead.bytes() +
+ receiver_address.ipaddr().overhead() +
+ cricket::kUdpHeaderSize;
+ {
+ // Only IPv4 address is supported.
+ RTC_CHECK_EQ(receiver_address.family(), AF_INET);
+ MutexLock lock(&mutex_);
+ endpoint_ = endpoint;
+ local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0);
+ remote_address_ = receiver_address;
+ packet_overhead_ = packet_overhead;
+ current_network_route_ = route;
+ }
+
+ sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged(
+ kDummyTransportName, route);
+}
+
+void NetworkNodeTransport::Disconnect() {
+ MutexLock lock(&mutex_);
+ current_network_route_.connected = false;
+ sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged(
+ kDummyTransportName, current_network_route_);
+ current_network_route_ = {};
+ endpoint_ = nullptr;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/network_node.h b/third_party/libwebrtc/test/scenario/network_node.h
new file mode 100644
index 0000000000..fe87cefa26
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/network_node.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_NETWORK_NODE_H_
+#define TEST_SCENARIO_NETWORK_NODE_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/call/transport.h"
+#include "api/units/timestamp.h"
+#include "call/call.h"
+#include "call/simulated_network.h"
+#include "rtc_base/copy_on_write_buffer.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
+class SimulationNode {
+ public:
+ SimulationNode(NetworkSimulationConfig config,
+ SimulatedNetwork* behavior,
+ EmulatedNetworkNode* network_node);
+ static std::unique_ptr<SimulatedNetwork> CreateBehavior(
+ NetworkSimulationConfig config);
+
+ void UpdateConfig(std::function<void(NetworkSimulationConfig*)> modifier);
+ void PauseTransmissionUntil(Timestamp until);
+ ColumnPrinter ConfigPrinter() const;
+ EmulatedNetworkNode* node() { return network_node_; }
+
+ private:
+ NetworkSimulationConfig config_;
+ SimulatedNetwork* const simulation_;
+ EmulatedNetworkNode* const network_node_;
+};
+
+class NetworkNodeTransport : public Transport {
+ public:
+ NetworkNodeTransport(Clock* sender_clock, Call* sender_call);
+ ~NetworkNodeTransport() override;
+
+ bool SendRtp(const uint8_t* packet,
+ size_t length,
+ const PacketOptions& options) override;
+ bool SendRtcp(const uint8_t* packet, size_t length) override;
+
+ void Connect(EmulatedEndpoint* endpoint,
+ const rtc::SocketAddress& receiver_address,
+ DataSize packet_overhead);
+ void Disconnect();
+
+ DataSize packet_overhead() {
+ MutexLock lock(&mutex_);
+ return packet_overhead_;
+ }
+
+ private:
+ Mutex mutex_;
+ Clock* const sender_clock_;
+ Call* const sender_call_;
+ EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(mutex_) = nullptr;
+ rtc::SocketAddress local_address_ RTC_GUARDED_BY(mutex_);
+ rtc::SocketAddress remote_address_ RTC_GUARDED_BY(mutex_);
+ DataSize packet_overhead_ RTC_GUARDED_BY(mutex_) = DataSize::Zero();
+ rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_);
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_NETWORK_NODE_H_
diff --git a/third_party/libwebrtc/test/scenario/performance_stats.cc b/third_party/libwebrtc/test/scenario/performance_stats.cc
new file mode 100644
index 0000000000..e12be8a003
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/performance_stats.h"
+
+#include <algorithm>
+
+namespace webrtc {
+namespace test {
+void VideoFramesStats::AddFrameInfo(const VideoFrameBuffer& frame,
+ Timestamp at_time) {
+ ++count;
+ RTC_DCHECK(at_time.IsFinite());
+ pixels.AddSample(frame.width() * frame.height());
+ resolution.AddSample(std::max(frame.width(), frame.height()));
+ frames.AddEvent(at_time);
+}
+
+void VideoFramesStats::AddStats(const VideoFramesStats& other) {
+ count += other.count;
+ pixels.AddSamples(other.pixels);
+ resolution.AddSamples(other.resolution);
+ frames.AddEvents(other.frames);
+}
+
+void VideoQualityStats::AddStats(const VideoQualityStats& other) {
+ capture.AddStats(other.capture);
+ render.AddStats(other.render);
+ lost_count += other.lost_count;
+ freeze_count += other.freeze_count;
+ capture_to_decoded_delay.AddSamples(other.capture_to_decoded_delay);
+ end_to_end_delay.AddSamples(other.end_to_end_delay);
+ psnr.AddSamples(other.psnr);
+ psnr_with_freeze.AddSamples(other.psnr_with_freeze);
+ skipped_between_rendered.AddSamples(other.skipped_between_rendered);
+ freeze_duration.AddSamples(other.freeze_duration);
+ time_between_freezes.AddSamples(other.time_between_freezes);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/performance_stats.h b/third_party/libwebrtc/test/scenario/performance_stats.h
new file mode 100644
index 0000000000..6974ab6d22
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_PERFORMANCE_STATS_H_
+#define TEST_SCENARIO_PERFORMANCE_STATS_H_
+
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/numerics/event_rate_counter.h"
+#include "rtc_base/numerics/sample_stats.h"
+
+namespace webrtc {
+namespace test {
+
+struct VideoFramePair {
+ rtc::scoped_refptr<VideoFrameBuffer> captured;
+ rtc::scoped_refptr<VideoFrameBuffer> decoded;
+ Timestamp capture_time = Timestamp::MinusInfinity();
+ Timestamp decoded_time = Timestamp::PlusInfinity();
+ Timestamp render_time = Timestamp::PlusInfinity();
+ // A unique identifier for the spatial/temporal layer the decoded frame
+ // belongs to. Note that this does not reflect the id as defined by the
+ // underlying layer setup.
+ int layer_id = 0;
+ int capture_id = 0;
+ int decode_id = 0;
+ // Indicates the repeat count for the decoded frame. Meaning that the same
+ // decoded frame has matched different captured frames.
+ int repeated = 0;
+};
+
+
+struct VideoFramesStats {
+ int count = 0;
+ SampleStats<double> pixels;
+ SampleStats<double> resolution;
+ EventRateCounter frames;
+ void AddFrameInfo(const VideoFrameBuffer& frame, Timestamp at_time);
+ void AddStats(const VideoFramesStats& other);
+};
+
+struct VideoQualityStats {
+ int lost_count = 0;
+ int freeze_count = 0;
+ VideoFramesStats capture;
+ VideoFramesStats render;
+ // Time from frame was captured on device to time frame was delivered from
+ // decoder.
+ SampleStats<TimeDelta> capture_to_decoded_delay;
+ // Time from frame was captured on device to time frame was displayed on
+ // device.
+ SampleStats<TimeDelta> end_to_end_delay;
+ // PSNR for delivered frames. Note that this might go up for a worse
+ // connection due to frame dropping.
+ SampleStats<double> psnr;
+ // PSNR for all frames, dropped or lost frames are compared to the last
+ // successfully delivered frame.
+ SampleStats<double> psnr_with_freeze;
+ // Frames skipped between two nearest.
+ SampleStats<double> skipped_between_rendered;
+ // In the next 2 metrics a freeze is a pause that is longer than the maximum:
+ // 1. 150ms
+ // 2. 3 * average time between two sequential frames.
+ // Item 1 will cover high fps video and is a duration, that is noticeable by
+ // human eye. Item 2 will cover low fps video like screen sharing.
+ SampleStats<TimeDelta> freeze_duration;
+ // Mean time between one freeze end and next freeze start.
+ SampleStats<TimeDelta> time_between_freezes;
+ void AddStats(const VideoQualityStats& other);
+};
+
+struct CollectedCallStats {
+ SampleStats<DataRate> target_rate;
+ SampleStats<TimeDelta> pacer_delay;
+ SampleStats<TimeDelta> round_trip_time;
+ SampleStats<double> memory_usage;
+};
+
+struct CollectedAudioReceiveStats {
+ SampleStats<double> expand_rate;
+ SampleStats<double> accelerate_rate;
+ SampleStats<TimeDelta> jitter_buffer;
+};
+struct CollectedVideoSendStats {
+ SampleStats<double> encode_frame_rate;
+ SampleStats<TimeDelta> encode_time;
+ SampleStats<double> encode_usage;
+ SampleStats<DataRate> media_bitrate;
+ SampleStats<DataRate> fec_bitrate;
+};
+struct CollectedVideoReceiveStats {
+ SampleStats<TimeDelta> decode_time;
+ SampleStats<TimeDelta> decode_time_max;
+ SampleStats<double> decode_pixels;
+ SampleStats<double> resolution;
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_PERFORMANCE_STATS_H_
diff --git a/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc b/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc
new file mode 100644
index 0000000000..8d87c87745
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/performance_stats_unittest.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/performance_stats.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+TEST(EventRateCounter, ReturnsCorrectTotalDuration) {
+ EventRateCounter event_rate_counter;
+ EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero());
+ event_rate_counter.AddEvent(Timestamp::Seconds(1));
+ EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Zero());
+ event_rate_counter.AddEvent(Timestamp::Seconds(2));
+ EXPECT_EQ(event_rate_counter.TotalDuration(), TimeDelta::Seconds(1));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/probing_test.cc b/third_party/libwebrtc/test/scenario/probing_test.cc
new file mode 100644
index 0000000000..86653ced9b
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/probing_test.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+
+TEST(ProbingTest, InitialProbingRampsUpTargetRateWhenNetworkIsGood) {
+ Scenario s;
+ NetworkSimulationConfig good_network;
+ good_network.bandwidth = DataRate::KilobitsPerSec(2000);
+
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ CallClientConfig send_config;
+ auto* caller = s.CreateClient("caller", send_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto route =
+ s.CreateRoutes(caller, {s.CreateSimulationNode(good_network)}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ s.CreateVideoStream(route->forward(), video_config);
+
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ 3 * send_config.transport.rates.start_rate);
+}
+
+TEST(ProbingTest, MidCallProbingRampupTriggeredByUpdatedBitrateConstraints) {
+ Scenario s;
+
+ const DataRate kStartRate = DataRate::KilobitsPerSec(300);
+ const DataRate kConstrainedRate = DataRate::KilobitsPerSec(100);
+ const DataRate kHighRate = DataRate::KilobitsPerSec(1500);
+
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ CallClientConfig send_call_config;
+ send_call_config.transport.rates.start_rate = kStartRate;
+ send_call_config.transport.rates.max_rate = kHighRate * 2;
+ auto* caller = s.CreateClient("caller", send_call_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto route = s.CreateRoutes(
+ caller, {s.CreateSimulationNode(NetworkSimulationConfig())}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ s.CreateVideoStream(route->forward(), video_config);
+
+ // Wait until initial probing rampup is done and then set a low max bitrate.
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ 5 * send_call_config.transport.rates.start_rate);
+ BitrateConstraints bitrate_config;
+ bitrate_config.max_bitrate_bps = kConstrainedRate.bps();
+ caller->UpdateBitrateConstraints(bitrate_config);
+
+ // Wait until the low send bitrate has taken effect, and then set a much
+ // higher max bitrate.
+ s.RunFor(TimeDelta::Seconds(2));
+ EXPECT_LT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kConstrainedRate * 1.1);
+ bitrate_config.max_bitrate_bps = 2 * kHighRate.bps();
+ caller->UpdateBitrateConstraints(bitrate_config);
+
+ // Check that the max send bitrate is reached quicker than would be possible
+ // with simple AIMD rate control.
+ s.RunFor(TimeDelta::Seconds(1));
+ EXPECT_GE(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kHighRate);
+}
+
+TEST(ProbingTest, ProbesRampsUpWhenVideoEncoderConfigChanges) {
+ Scenario s;
+ const DataRate kStartRate = DataRate::KilobitsPerSec(50);
+ const DataRate kHdRate = DataRate::KilobitsPerSec(3250);
+
+ // Set up 3-layer simulcast.
+ VideoStreamConfig video_config;
+ video_config.encoder.codec =
+ VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ video_config.encoder.simulcast_streams = {webrtc::ScalabilityMode::kL1T3,
+ webrtc::ScalabilityMode::kL1T3,
+ webrtc::ScalabilityMode::kL1T3};
+ video_config.source.generator.width = 1280;
+ video_config.source.generator.height = 720;
+
+ CallClientConfig send_call_config;
+ send_call_config.transport.rates.start_rate = kStartRate;
+ send_call_config.transport.rates.max_rate = kHdRate * 2;
+ auto* caller = s.CreateClient("caller", send_call_config);
+ auto* callee = s.CreateClient("callee", CallClientConfig());
+ auto send_net =
+ s.CreateMutableSimulationNode([&](NetworkSimulationConfig* c) {
+ c->bandwidth = DataRate::KilobitsPerSec(200);
+ });
+ auto route =
+ s.CreateRoutes(caller, {send_net->node()}, callee,
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ auto* video_stream = s.CreateVideoStream(route->forward(), video_config);
+
+ // Only QVGA enabled initially. Run until initial probing is done and BWE
+ // has settled.
+ video_stream->send()->UpdateActiveLayers({true, false, false});
+ s.RunFor(TimeDelta::Seconds(2));
+
+ // Remove network constraints and run for a while more, BWE should be much
+ // less than required HD rate.
+ send_net->UpdateConfig([&](NetworkSimulationConfig* c) {
+ c->bandwidth = DataRate::PlusInfinity();
+ });
+ s.RunFor(TimeDelta::Seconds(2));
+
+ DataRate bandwidth =
+ DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps);
+ EXPECT_LT(bandwidth, kHdRate / 4);
+
+ // Enable all layers, triggering a probe.
+ video_stream->send()->UpdateActiveLayers({true, true, true});
+
+ // Run for a short while and verify BWE has ramped up fast.
+ s.RunFor(TimeDelta::Seconds(2));
+ EXPECT_GT(DataRate::BitsPerSec(caller->GetStats().send_bandwidth_bps),
+ kHdRate);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario.cc b/third_party/libwebrtc/test/scenario/scenario.cc
new file mode 100644
index 0000000000..93377120a1
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "absl/strings/string_view.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "rtc_base/socket_address.h"
+#include "test/logging/file_log_writer.h"
+#include "test/network/network_emulation.h"
+#include "test/scenario/video_stream.h"
+#include "test/testsupport/file_utils.h"
+
+ABSL_FLAG(bool, scenario_logs, false, "Save logs from scenario framework.");
+ABSL_FLAG(std::string,
+ scenario_logs_root,
+ "",
+ "Output root path, based on project root if unset.");
+
+namespace webrtc {
+namespace test {
+namespace {
+
+std::unique_ptr<FileLogWriterFactory> GetScenarioLogManager(
+ absl::string_view file_name) {
+ if (absl::GetFlag(FLAGS_scenario_logs) && !file_name.empty()) {
+ std::string output_root = absl::GetFlag(FLAGS_scenario_logs_root);
+ if (output_root.empty())
+ output_root = OutputPath() + "output_data/";
+
+ auto base_filename = output_root + std::string(file_name) + ".";
+ RTC_LOG(LS_INFO) << "Saving scenario logs to: " << base_filename;
+ return std::make_unique<FileLogWriterFactory>(base_filename);
+ }
+ return nullptr;
+}
+} // namespace
+
+Scenario::Scenario()
+ : Scenario(std::unique_ptr<LogWriterFactoryInterface>(),
+ /*real_time=*/false) {}
+
+Scenario::Scenario(const testing::TestInfo* test_info)
+ : Scenario(std::string(test_info->test_suite_name()) + "/" +
+ test_info->name()) {}
+
+Scenario::Scenario(absl::string_view file_name)
+ : Scenario(file_name, /*real_time=*/false) {}
+
+Scenario::Scenario(absl::string_view file_name, bool real_time)
+ : Scenario(GetScenarioLogManager(file_name), real_time) {}
+
+Scenario::Scenario(
+ std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
+ bool real_time)
+ : log_writer_factory_(std::move(log_writer_factory)),
+ network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated,
+ EmulatedNetworkStatsGatheringMode::kDefault),
+ clock_(network_manager_.time_controller()->GetClock()),
+ audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()),
+ audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()),
+ task_queue_(network_manager_.time_controller()
+ ->GetTaskQueueFactory()
+ ->CreateTaskQueue("Scenario",
+ TaskQueueFactory::Priority::NORMAL)) {}
+
+Scenario::~Scenario() {
+ if (start_time_.IsFinite())
+ Stop();
+ for (auto& call_client : clients_) {
+ call_client->transport_->Disconnect();
+ call_client->UnBind();
+ }
+}
+
+ColumnPrinter Scenario::TimePrinter() {
+ return ColumnPrinter::Lambda(
+ "time",
+ [this](rtc::SimpleStringBuilder& sb) {
+ sb.AppendFormat("%.3lf", Now().seconds<double>());
+ },
+ 32);
+}
+
+StatesPrinter* Scenario::CreatePrinter(absl::string_view name,
+ TimeDelta interval,
+ std::vector<ColumnPrinter> printers) {
+ std::vector<ColumnPrinter> all_printers{TimePrinter()};
+ for (auto& printer : printers)
+ all_printers.push_back(printer);
+ StatesPrinter* printer = new StatesPrinter(GetLogWriter(name), all_printers);
+ printers_.emplace_back(printer);
+ printer->PrintHeaders();
+ if (interval.IsFinite())
+ Every(interval, [printer] { printer->PrintRow(); });
+ return printer;
+}
+
+CallClient* Scenario::CreateClient(absl::string_view name,
+ CallClientConfig config) {
+ CallClient* client = new CallClient(network_manager_.time_controller(),
+ GetLogWriterFactory(name), config);
+ if (config.transport.state_log_interval.IsFinite()) {
+ Every(config.transport.state_log_interval, [this, client]() {
+ client->network_controller_factory_.LogCongestionControllerStats(Now());
+ });
+ }
+ clients_.emplace_back(client);
+ return client;
+}
+
+CallClient* Scenario::CreateClient(
+ absl::string_view name,
+ std::function<void(CallClientConfig*)> config_modifier) {
+ CallClientConfig config;
+ config_modifier(&config);
+ return CreateClient(name, config);
+}
+
+CallClientPair* Scenario::CreateRoutes(
+ CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link) {
+ return CreateRoutes(first, send_link,
+ DataSize::Bytes(PacketOverhead::kDefault), second,
+ return_link, DataSize::Bytes(PacketOverhead::kDefault));
+}
+
+CallClientPair* Scenario::CreateRoutes(
+ CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ DataSize first_overhead,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link,
+ DataSize second_overhead) {
+ CallClientPair* client_pair = new CallClientPair(first, second);
+ ChangeRoute(client_pair->forward(), send_link, first_overhead);
+ ChangeRoute(client_pair->reverse(), return_link, second_overhead);
+ client_pairs_.emplace_back(client_pair);
+ return client_pair;
+}
+
+void Scenario::ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes) {
+ ChangeRoute(clients, over_nodes, DataSize::Bytes(PacketOverhead::kDefault));
+}
+
+void Scenario::ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ DataSize overhead) {
+ EmulatedRoute* route = network_manager_.CreateRoute(over_nodes);
+ uint16_t port = clients.second->Bind(route->to);
+ auto addr = rtc::SocketAddress(route->to->GetPeerLocalAddress(), port);
+ clients.first->transport_->Connect(route->from, addr, overhead);
+}
+
+EmulatedNetworkNode* Scenario::CreateSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier) {
+ NetworkSimulationConfig config;
+ config_modifier(&config);
+ return CreateSimulationNode(config);
+}
+
+EmulatedNetworkNode* Scenario::CreateSimulationNode(
+ NetworkSimulationConfig config) {
+ return network_manager_.CreateEmulatedNode(
+ SimulationNode::CreateBehavior(config));
+}
+
+SimulationNode* Scenario::CreateMutableSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier) {
+ NetworkSimulationConfig config;
+ config_modifier(&config);
+ return CreateMutableSimulationNode(config);
+}
+
+SimulationNode* Scenario::CreateMutableSimulationNode(
+ NetworkSimulationConfig config) {
+ std::unique_ptr<SimulatedNetwork> behavior =
+ SimulationNode::CreateBehavior(config);
+ SimulatedNetwork* behavior_ptr = behavior.get();
+ auto* emulated_node =
+ network_manager_.CreateEmulatedNode(std::move(behavior));
+ simulation_nodes_.emplace_back(
+ new SimulationNode(config, behavior_ptr, emulated_node));
+ return simulation_nodes_.back().get();
+}
+
+void Scenario::TriggerPacketBurst(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t num_packets,
+ size_t packet_size) {
+ network_manager_.CreateCrossTrafficRoute(over_nodes)
+ ->TriggerPacketBurst(num_packets, packet_size);
+}
+
+void Scenario::NetworkDelayedAction(
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t packet_size,
+ std::function<void()> action) {
+ network_manager_.CreateCrossTrafficRoute(over_nodes)
+ ->NetworkDelayedAction(packet_size, action);
+}
+
+VideoStreamPair* Scenario::CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(VideoStreamConfig*)> config_modifier) {
+ VideoStreamConfig config;
+ config_modifier(&config);
+ return CreateVideoStream(clients, config);
+}
+
+VideoStreamPair* Scenario::CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ VideoStreamConfig config) {
+ std::vector<RtpExtension> extensions = GetVideoRtpExtensions(config);
+ clients.first->SetVideoReceiveRtpHeaderExtensions(extensions);
+ clients.second->SetVideoReceiveRtpHeaderExtensions(extensions);
+ video_streams_.emplace_back(
+ new VideoStreamPair(clients.first, clients.second, config));
+ return video_streams_.back().get();
+}
+
+AudioStreamPair* Scenario::CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(AudioStreamConfig*)> config_modifier) {
+ AudioStreamConfig config;
+ config_modifier(&config);
+ return CreateAudioStream(clients, config);
+}
+
+AudioStreamPair* Scenario::CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ AudioStreamConfig config) {
+ std::vector<RtpExtension> extensions = GetAudioRtpExtensions(config);
+ clients.first->SetAudioReceiveRtpHeaderExtensions(extensions);
+ clients.second->SetAudioReceiveRtpHeaderExtensions(extensions);
+ audio_streams_.emplace_back(
+ new AudioStreamPair(clients.first, audio_encoder_factory_, clients.second,
+ audio_decoder_factory_, config));
+ return audio_streams_.back().get();
+}
+
+void Scenario::Every(TimeDelta interval,
+ absl::AnyInvocable<void(TimeDelta)> function) {
+ RepeatingTaskHandle::DelayedStart(
+ task_queue_.get(), interval,
+ [interval, function = std::move(function)]() mutable {
+ function(interval);
+ return interval;
+ });
+}
+
+void Scenario::Every(TimeDelta interval, absl::AnyInvocable<void()> function) {
+ RepeatingTaskHandle::DelayedStart(
+ task_queue_.get(), interval,
+ [interval, function = std::move(function)]() mutable {
+ function();
+ return interval;
+ });
+}
+
+void Scenario::Post(absl::AnyInvocable<void() &&> function) {
+ task_queue_->PostTask(std::move(function));
+}
+
+void Scenario::At(TimeDelta offset, absl::AnyInvocable<void() &&> function) {
+ RTC_DCHECK_GT(offset, TimeSinceStart());
+ task_queue_->PostDelayedTask(std::move(function), TimeUntilTarget(offset));
+}
+
+void Scenario::RunFor(TimeDelta duration) {
+ if (start_time_.IsInfinite())
+ Start();
+ network_manager_.time_controller()->AdvanceTime(duration);
+}
+
+void Scenario::RunUntil(TimeDelta target_time_since_start) {
+ RunFor(TimeUntilTarget(target_time_since_start));
+}
+
+void Scenario::RunUntil(TimeDelta target_time_since_start,
+ TimeDelta check_interval,
+ std::function<bool()> exit_function) {
+ if (start_time_.IsInfinite())
+ Start();
+ while (check_interval >= TimeUntilTarget(target_time_since_start)) {
+ network_manager_.time_controller()->AdvanceTime(check_interval);
+ if (exit_function())
+ return;
+ }
+ network_manager_.time_controller()->AdvanceTime(
+ TimeUntilTarget(target_time_since_start));
+}
+
+void Scenario::Start() {
+ start_time_ = clock_->CurrentTime();
+ for (auto& stream_pair : video_streams_)
+ stream_pair->receive()->Start();
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->receive()->Start();
+ for (auto& stream_pair : video_streams_) {
+ if (stream_pair->config_.autostart) {
+ stream_pair->send()->Start();
+ }
+ }
+ for (auto& stream_pair : audio_streams_) {
+ if (stream_pair->config_.autostart) {
+ stream_pair->send()->Start();
+ }
+ }
+}
+
+void Scenario::Stop() {
+ RTC_DCHECK(start_time_.IsFinite());
+ for (auto& stream_pair : video_streams_) {
+ stream_pair->send()->Stop();
+ }
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->send()->Stop();
+ for (auto& stream_pair : video_streams_)
+ stream_pair->receive()->Stop();
+ for (auto& stream_pair : audio_streams_)
+ stream_pair->receive()->Stop();
+ start_time_ = Timestamp::PlusInfinity();
+}
+
+Timestamp Scenario::Now() {
+ return clock_->CurrentTime();
+}
+
+TimeDelta Scenario::TimeSinceStart() {
+ if (start_time_.IsInfinite())
+ return TimeDelta::Zero();
+ return Now() - start_time_;
+}
+
+TimeDelta Scenario::TimeUntilTarget(TimeDelta target_time_offset) {
+ return target_time_offset - TimeSinceStart();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario.h b/third_party/libwebrtc/test/scenario/scenario.h
new file mode 100644
index 0000000000..cad9210002
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_SCENARIO_H_
+#define TEST_SCENARIO_SCENARIO_H_
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/functional/any_invocable.h"
+#include "absl/strings/string_view.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/test/time_controller.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "test/gtest.h"
+#include "test/logging/log_writer.h"
+#include "test/network/network_emulation_manager.h"
+#include "test/scenario/audio_stream.h"
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+#include "test/scenario/video_stream.h"
+
+namespace webrtc {
+namespace test {
+// Scenario is a class owning everything for a test scenario. It creates and
+// holds network nodes, call clients and media streams. It also provides methods
+// for changing behavior at runtime. Since it always keeps ownership of the
+// created components, it generally returns non-owning pointers. It maintains
+// the life of its objects until it is destroyed.
+// For methods accepting configuration structs, a modifier function interface is
+// generally provided. This allows simple partial overriding of the default
+// configuration.
+class Scenario {
+ public:
+ Scenario();
+ explicit Scenario(const testing::TestInfo* test_info);
+ explicit Scenario(absl::string_view file_name);
+ Scenario(absl::string_view file_name, bool real_time);
+ Scenario(std::unique_ptr<LogWriterFactoryInterface> log_writer_manager,
+ bool real_time);
+
+ ~Scenario();
+
+ Scenario(const Scenario&) = delete;
+ Scenario& operator=(const Scenario&) = delete;
+
+ NetworkEmulationManagerImpl* net() { return &network_manager_; }
+
+ EmulatedNetworkNode* CreateSimulationNode(NetworkSimulationConfig config);
+ EmulatedNetworkNode* CreateSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier);
+
+ SimulationNode* CreateMutableSimulationNode(NetworkSimulationConfig config);
+ SimulationNode* CreateMutableSimulationNode(
+ std::function<void(NetworkSimulationConfig*)> config_modifier);
+
+ CallClient* CreateClient(absl::string_view name, CallClientConfig config);
+ CallClient* CreateClient(
+ absl::string_view name,
+ std::function<void(CallClientConfig*)> config_modifier);
+
+ CallClientPair* CreateRoutes(CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link);
+
+ CallClientPair* CreateRoutes(CallClient* first,
+ std::vector<EmulatedNetworkNode*> send_link,
+ DataSize first_overhead,
+ CallClient* second,
+ std::vector<EmulatedNetworkNode*> return_link,
+ DataSize second_overhead);
+
+ void ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes);
+
+ void ChangeRoute(std::pair<CallClient*, CallClient*> clients,
+ std::vector<EmulatedNetworkNode*> over_nodes,
+ DataSize overhead);
+
+ VideoStreamPair* CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(VideoStreamConfig*)> config_modifier);
+ VideoStreamPair* CreateVideoStream(
+ std::pair<CallClient*, CallClient*> clients,
+ VideoStreamConfig config);
+
+ AudioStreamPair* CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ std::function<void(AudioStreamConfig*)> config_modifier);
+ AudioStreamPair* CreateAudioStream(
+ std::pair<CallClient*, CallClient*> clients,
+ AudioStreamConfig config);
+
+ // Runs the provided function with a fixed interval. For real time tests,
+ // `function` starts being called after `interval` from the call to Every().
+ void Every(TimeDelta interval, absl::AnyInvocable<void(TimeDelta)> function);
+ void Every(TimeDelta interval, absl::AnyInvocable<void()> function);
+
+ // Runs the provided function on the internal task queue. This ensure that
+ // it's run on the main thread for simulated time tests.
+ void Post(absl::AnyInvocable<void() &&> function);
+
+ // Runs the provided function after given duration has passed. For real time
+ // tests, `function` is called after `target_time_since_start` from the call
+ // to Every().
+ void At(TimeDelta offset, absl::AnyInvocable<void() &&> function);
+
+ // Sends a packet over the nodes and runs `action` when it has been delivered.
+ void NetworkDelayedAction(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t packet_size,
+ std::function<void()> action);
+
+ // Runs the scenario for the given time.
+ void RunFor(TimeDelta duration);
+ // Runs the scenario until `target_time_since_start`.
+ void RunUntil(TimeDelta target_time_since_start);
+ // Runs the scenario until `target_time_since_start` or `exit_function`
+ // returns true. `exit_function` is polled after each `check_interval` has
+ // passed.
+ void RunUntil(TimeDelta target_time_since_start,
+ TimeDelta check_interval,
+ std::function<bool()> exit_function);
+ void Start();
+ void Stop();
+
+ // Triggers sending of dummy packets over the given nodes.
+ void TriggerPacketBurst(std::vector<EmulatedNetworkNode*> over_nodes,
+ size_t num_packets,
+ size_t packet_size);
+
+ ColumnPrinter TimePrinter();
+ StatesPrinter* CreatePrinter(absl::string_view name,
+ TimeDelta interval,
+ std::vector<ColumnPrinter> printers);
+
+ // Returns the current time.
+ Timestamp Now();
+ // Return the duration of the current session so far.
+ TimeDelta TimeSinceStart();
+
+ std::unique_ptr<RtcEventLogOutput> GetLogWriter(absl::string_view name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return log_writer_factory_->Create(name);
+ }
+ std::unique_ptr<LogWriterFactoryInterface> GetLogWriterFactory(
+ absl::string_view name) {
+ if (!log_writer_factory_ || name.empty())
+ return nullptr;
+ return std::make_unique<LogWriterFactoryAddPrefix>(
+ log_writer_factory_.get(), name);
+ }
+
+ private:
+ TimeDelta TimeUntilTarget(TimeDelta target_time_offset);
+
+ const std::unique_ptr<LogWriterFactoryInterface> log_writer_factory_;
+ NetworkEmulationManagerImpl network_manager_;
+ Clock* clock_;
+
+ std::vector<std::unique_ptr<CallClient>> clients_;
+ std::vector<std::unique_ptr<CallClientPair>> client_pairs_;
+ std::vector<std::unique_ptr<VideoStreamPair>> video_streams_;
+ std::vector<std::unique_ptr<AudioStreamPair>> audio_streams_;
+ std::vector<std::unique_ptr<SimulationNode>> simulation_nodes_;
+ std::vector<std::unique_ptr<StatesPrinter>> printers_;
+
+ rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory_;
+ rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory_;
+
+ Timestamp start_time_ = Timestamp::PlusInfinity();
+ // Defined last so it's destroyed first.
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_SCENARIO_H_
diff --git a/third_party/libwebrtc/test/scenario/scenario_config.cc b/third_party/libwebrtc/test/scenario/scenario_config.cc
new file mode 100644
index 0000000000..3f8a70a162
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_config.cc
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario_config.h"
+
+namespace webrtc {
+namespace test {
+
+TransportControllerConfig::Rates::Rates() = default;
+TransportControllerConfig::Rates::Rates(
+ const TransportControllerConfig::Rates&) = default;
+TransportControllerConfig::Rates::~Rates() = default;
+
+PacketStreamConfig::PacketStreamConfig() = default;
+PacketStreamConfig::PacketStreamConfig(const PacketStreamConfig&) = default;
+PacketStreamConfig::~PacketStreamConfig() = default;
+
+VideoStreamConfig::Encoder::Encoder() = default;
+VideoStreamConfig::Encoder::Encoder(const VideoStreamConfig::Encoder&) =
+ default;
+VideoStreamConfig::Encoder::~Encoder() = default;
+
+VideoStreamConfig::Stream::Stream() = default;
+VideoStreamConfig::Stream::Stream(const VideoStreamConfig::Stream&) = default;
+VideoStreamConfig::Stream::~Stream() = default;
+
+AudioStreamConfig::AudioStreamConfig() = default;
+AudioStreamConfig::AudioStreamConfig(const AudioStreamConfig&) = default;
+AudioStreamConfig::~AudioStreamConfig() = default;
+
+AudioStreamConfig::Encoder::Encoder() = default;
+AudioStreamConfig::Encoder::Encoder(const AudioStreamConfig::Encoder&) =
+ default;
+AudioStreamConfig::Encoder::~Encoder() = default;
+
+AudioStreamConfig::Stream::Stream() = default;
+AudioStreamConfig::Stream::Stream(const AudioStreamConfig::Stream&) = default;
+AudioStreamConfig::Stream::~Stream() = default;
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/scenario_config.h b/third_party/libwebrtc/test/scenario/scenario_config.h
new file mode 100644
index 0000000000..9ce99401d7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_config.h
@@ -0,0 +1,231 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_SCENARIO_CONFIG_H_
+#define TEST_SCENARIO_SCENARIO_CONFIG_H_
+
+#include <stddef.h>
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/fec_controller.h"
+#include "api/rtp_parameters.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/transport/network_control.h"
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_codec_type.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+struct PacketOverhead {
+ static constexpr size_t kSrtp = 10;
+ static constexpr size_t kStun = 4;
+ // TURN messages can be sent either with or without an established channel.
+ // In the latter case, a TURN Send/Data Indication is sent which has
+ // significantly more overhead.
+ static constexpr size_t kTurnChannelMessage = 4;
+ static constexpr size_t kTurnIndicationMessage = 36;
+ static constexpr size_t kDefault = kSrtp;
+};
+struct TransportControllerConfig {
+ struct Rates {
+ Rates();
+ Rates(const Rates&);
+ ~Rates();
+ DataRate min_rate = DataRate::KilobitsPerSec(30);
+ DataRate max_rate = DataRate::KilobitsPerSec(3000);
+ DataRate start_rate = DataRate::KilobitsPerSec(300);
+ } rates;
+ NetworkControllerFactoryInterface* cc_factory = nullptr;
+ TimeDelta state_log_interval = TimeDelta::Millis(100);
+};
+
+struct CallClientConfig {
+ TransportControllerConfig transport;
+ // Allows the pacer to send out multiple packets in a burst.
+ // The number of bits that can be sent in one burst is pacer_burst_interval *
+ // current bwe. 40ms is the default Chrome setting.
+ TimeDelta pacer_burst_interval = TimeDelta::Millis(40);
+ const FieldTrialsView* field_trials = nullptr;
+};
+
+struct PacketStreamConfig {
+ PacketStreamConfig();
+ PacketStreamConfig(const PacketStreamConfig&);
+ ~PacketStreamConfig();
+ int frame_rate = 30;
+ DataRate max_data_rate = DataRate::Infinity();
+ DataSize max_packet_size = DataSize::Bytes(1400);
+ DataSize min_frame_size = DataSize::Bytes(100);
+ double keyframe_multiplier = 1;
+ DataSize packet_overhead = DataSize::Bytes(PacketOverhead::kDefault);
+};
+
+struct VideoStreamConfig {
+ bool autostart = true;
+ struct Source {
+ enum Capture {
+ kGenerator,
+ kVideoFile,
+ kGenerateSlides,
+ kImageSlides,
+ // Support for explicit frame triggers should be added here if needed.
+ } capture = Capture::kGenerator;
+ struct Slides {
+ TimeDelta change_interval = TimeDelta::Seconds(10);
+ struct Generator {
+ int width = 1600;
+ int height = 1200;
+ } generator;
+ struct Images {
+ struct Crop {
+ TimeDelta scroll_duration = TimeDelta::Seconds(0);
+ absl::optional<int> width;
+ absl::optional<int> height;
+ } crop;
+ int width = 1850;
+ int height = 1110;
+ std::vector<std::string> paths = {
+ "web_screenshot_1850_1110",
+ "presentation_1850_1110",
+ "photo_1850_1110",
+ "difficult_photo_1850_1110",
+ };
+ } images;
+ } slides;
+ struct Generator {
+ using PixelFormat = FrameGeneratorInterface::OutputType;
+ PixelFormat pixel_format = PixelFormat::kI420;
+ int width = 320;
+ int height = 180;
+ } generator;
+ struct VideoFile {
+ std::string name;
+ // Must be set to width and height of the source video file.
+ int width = 0;
+ int height = 0;
+ } video_file;
+ int framerate = 30;
+ } source;
+ struct Encoder {
+ Encoder();
+ Encoder(const Encoder&);
+ ~Encoder();
+ enum class ContentType {
+ kVideo,
+ kScreen,
+ } content_type = ContentType::kVideo;
+ enum Implementation { kFake, kSoftware, kHardware } implementation = kFake;
+ struct Fake {
+ DataRate max_rate = DataRate::Infinity();
+ } fake;
+
+ using Codec = VideoCodecType;
+ Codec codec = Codec::kVideoCodecGeneric;
+ absl::optional<DataRate> max_data_rate;
+ absl::optional<DataRate> min_data_rate;
+ absl::optional<int> max_framerate;
+ // Counted in frame count.
+ absl::optional<int> key_frame_interval = 3000;
+ bool frame_dropping = true;
+ struct SingleLayer {
+ bool denoising = true;
+ bool automatic_scaling = true;
+ } single;
+ std::vector<webrtc::ScalabilityMode> simulcast_streams = {
+ webrtc::ScalabilityMode::kL1T1};
+
+ DegradationPreference degradation_preference =
+ DegradationPreference::MAINTAIN_FRAMERATE;
+ bool suspend_below_min_bitrate = false;
+ } encoder;
+ struct Stream {
+ Stream();
+ Stream(const Stream&);
+ ~Stream();
+ bool abs_send_time = false;
+ bool packet_feedback = true;
+ bool use_rtx = true;
+ DataRate pad_to_rate = DataRate::Zero();
+ TimeDelta nack_history_time = TimeDelta::Millis(1000);
+ bool use_flexfec = false;
+ bool use_ulpfec = false;
+ FecControllerFactoryInterface* fec_controller_factory = nullptr;
+ } stream;
+ struct Rendering {
+ enum Type { kFake } type = kFake;
+ std::string sync_group;
+ } render;
+ struct Hooks {
+ std::vector<std::function<void(const VideoFramePair&)>> frame_pair_handlers;
+ } hooks;
+};
+
+struct AudioStreamConfig {
+ AudioStreamConfig();
+ AudioStreamConfig(const AudioStreamConfig&);
+ ~AudioStreamConfig();
+ bool autostart = true;
+ struct Source {
+ int channels = 1;
+ } source;
+ bool network_adaptation = false;
+ struct NetworkAdaptation {
+ struct FrameLength {
+ double min_packet_loss_for_decrease = 0;
+ double max_packet_loss_for_increase = 1;
+ DataRate min_rate_for_20_ms = DataRate::Zero();
+ DataRate max_rate_for_60_ms = DataRate::Infinity();
+ DataRate min_rate_for_60_ms = DataRate::Zero();
+ DataRate max_rate_for_120_ms = DataRate::Infinity();
+ } frame;
+ std::string binary_proto;
+ } adapt;
+ struct Encoder {
+ Encoder();
+ Encoder(const Encoder&);
+ ~Encoder();
+ bool allocate_bitrate = false;
+ bool enable_dtx = false;
+ DataRate fixed_rate = DataRate::KilobitsPerSec(32);
+ // Overrides fixed rate.
+ absl::optional<DataRate> min_rate;
+ absl::optional<DataRate> max_rate;
+ TimeDelta initial_frame_length = TimeDelta::Millis(20);
+ } encoder;
+ struct Stream {
+ Stream();
+ Stream(const Stream&);
+ ~Stream();
+ bool abs_send_time = true;
+ bool in_bandwidth_estimation = true;
+ } stream;
+ struct Rendering {
+ std::string sync_group;
+ } render;
+};
+
+// TODO(srte): Merge this with BuiltInNetworkBehaviorConfig.
+struct NetworkSimulationConfig {
+ DataRate bandwidth = DataRate::Infinity();
+ TimeDelta delay = TimeDelta::Zero();
+ TimeDelta delay_std_dev = TimeDelta::Zero();
+ double loss_rate = 0;
+ absl::optional<int> packet_queue_length_limit;
+ DataSize packet_overhead = DataSize::Zero();
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_SCENARIO_CONFIG_H_
diff --git a/third_party/libwebrtc/test/scenario/scenario_unittest.cc b/third_party/libwebrtc/test/scenario/scenario_unittest.cc
new file mode 100644
index 0000000000..6861151a2d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/scenario_unittest.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/scenario.h"
+
+#include <atomic>
+
+#include "api/test/network_emulation/create_cross_traffic.h"
+#include "api/test/network_emulation/cross_traffic.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/logging/memory_log_writer.h"
+#include "test/scenario/stats_collection.h"
+
+namespace webrtc {
+namespace test {
+TEST(ScenarioTest, StartsAndStopsWithoutErrors) {
+ std::atomic<bool> packet_received(false);
+ std::atomic<bool> bitrate_changed(false);
+ Scenario s;
+ CallClientConfig call_client_config;
+ call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+ auto* alice = s.CreateClient("alice", call_client_config);
+ auto* bob = s.CreateClient("bob", call_client_config);
+ NetworkSimulationConfig network_config;
+ auto alice_net = s.CreateSimulationNode(network_config);
+ auto bob_net = s.CreateSimulationNode(network_config);
+ auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+
+ VideoStreamConfig video_stream_config;
+ s.CreateVideoStream(route->forward(), video_stream_config);
+ s.CreateVideoStream(route->reverse(), video_stream_config);
+
+ AudioStreamConfig audio_stream_config;
+ audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6);
+ audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64);
+ audio_stream_config.encoder.allocate_bitrate = true;
+ audio_stream_config.stream.in_bandwidth_estimation = false;
+ s.CreateAudioStream(route->forward(), audio_stream_config);
+ s.CreateAudioStream(route->reverse(), audio_stream_config);
+
+ RandomWalkConfig cross_traffic_config;
+ s.net()->StartCrossTraffic(CreateRandomWalkCrossTraffic(
+ s.net()->CreateCrossTrafficRoute({alice_net}), cross_traffic_config));
+
+ s.NetworkDelayedAction({alice_net, bob_net}, 100,
+ [&packet_received] { packet_received = true; });
+ s.Every(TimeDelta::Millis(10), [alice, bob, &bitrate_changed] {
+ if (alice->GetStats().send_bandwidth_bps != 300000 &&
+ bob->GetStats().send_bandwidth_bps != 300000)
+ bitrate_changed = true;
+ });
+ s.RunUntil(TimeDelta::Seconds(2), TimeDelta::Millis(5),
+ [&bitrate_changed, &packet_received] {
+ return packet_received && bitrate_changed;
+ });
+ EXPECT_TRUE(packet_received);
+ EXPECT_TRUE(bitrate_changed);
+}
+namespace {
+void SetupVideoCall(Scenario& s, VideoQualityAnalyzer* analyzer) {
+ CallClientConfig call_config;
+ auto* alice = s.CreateClient("alice", call_config);
+ auto* bob = s.CreateClient("bob", call_config);
+ NetworkSimulationConfig network_config;
+ network_config.bandwidth = DataRate::KilobitsPerSec(1000);
+ network_config.delay = TimeDelta::Millis(50);
+ auto alice_net = s.CreateSimulationNode(network_config);
+ auto bob_net = s.CreateSimulationNode(network_config);
+ auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+ VideoStreamConfig video;
+ if (analyzer) {
+ video.source.capture = VideoStreamConfig::Source::Capture::kVideoFile;
+ video.source.video_file.name = "foreman_cif";
+ video.source.video_file.width = 352;
+ video.source.video_file.height = 288;
+ video.source.framerate = 30;
+ video.encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ video.encoder.implementation =
+ VideoStreamConfig::Encoder::Implementation::kSoftware;
+ video.hooks.frame_pair_handlers = {analyzer->Handler()};
+ }
+ s.CreateVideoStream(route->forward(), video);
+ s.CreateAudioStream(route->forward(), AudioStreamConfig());
+}
+} // namespace
+
+TEST(ScenarioTest, SimTimeEncoding) {
+ VideoQualityAnalyzerConfig analyzer_config;
+ analyzer_config.psnr_coverage = 0.1;
+ VideoQualityAnalyzer analyzer(analyzer_config);
+ {
+ Scenario s("scenario/encode_sim", false);
+ SetupVideoCall(s, &analyzer);
+ s.RunFor(TimeDelta::Seconds(2));
+ }
+ // Regression tests based on previous runs.
+ EXPECT_EQ(analyzer.stats().lost_count, 0);
+ EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 38, 5);
+}
+
+// TODO(bugs.webrtc.org/10515): Remove this when performance has been improved.
+#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG)
+#define MAYBE_RealTimeEncoding DISABLED_RealTimeEncoding
+#else
+#define MAYBE_RealTimeEncoding RealTimeEncoding
+#endif
+TEST(ScenarioTest, MAYBE_RealTimeEncoding) {
+ VideoQualityAnalyzerConfig analyzer_config;
+ analyzer_config.psnr_coverage = 0.1;
+ VideoQualityAnalyzer analyzer(analyzer_config);
+ {
+ Scenario s("scenario/encode_real", true);
+ SetupVideoCall(s, &analyzer);
+ s.RunFor(TimeDelta::Seconds(2));
+ }
+ // Regression tests based on previous runs.
+ EXPECT_LT(analyzer.stats().lost_count, 2);
+ // This is far below what is expected but ensures that we get something.
+ EXPECT_GT(analyzer.stats().psnr_with_freeze.Mean(), 10);
+}
+
+TEST(ScenarioTest, SimTimeFakeing) {
+ Scenario s("scenario/encode_sim", false);
+ SetupVideoCall(s, nullptr);
+ s.RunFor(TimeDelta::Seconds(2));
+}
+
+TEST(ScenarioTest, WritesToRtcEventLog) {
+ MemoryLogStorage storage;
+ {
+ Scenario s(storage.CreateFactory(), false);
+ SetupVideoCall(s, nullptr);
+ s.RunFor(TimeDelta::Seconds(1));
+ }
+ auto logs = storage.logs();
+ // We expect that an RTC event log has been created and that it has some data.
+ EXPECT_GE(storage.logs().at("alice.rtc.dat").size(), 1u);
+}
+
+TEST(ScenarioTest,
+ RetransmitsVideoPacketsInAudioAndVideoCallWithSendSideBweAndLoss) {
+ // Make sure audio packets are included in transport feedback.
+ test::ScopedFieldTrials override_field_trials(
+ "WebRTC-Audio-ABWENoTWCC/Disabled/");
+
+ Scenario s;
+ CallClientConfig call_client_config;
+ call_client_config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+ auto* alice = s.CreateClient("alice", call_client_config);
+ auto* bob = s.CreateClient("bob", call_client_config);
+ NetworkSimulationConfig network_config;
+ // Add some loss and delay.
+ network_config.delay = TimeDelta::Millis(200);
+ network_config.loss_rate = 0.05;
+ auto alice_net = s.CreateSimulationNode(network_config);
+ auto bob_net = s.CreateSimulationNode(network_config);
+ auto route = s.CreateRoutes(alice, {alice_net}, bob, {bob_net});
+
+ // First add an audio stream, then a video stream.
+ // Needed to make sure audio RTP module is selected first when sending
+ // transport feedback message.
+ AudioStreamConfig audio_stream_config;
+ audio_stream_config.encoder.min_rate = DataRate::KilobitsPerSec(6);
+ audio_stream_config.encoder.max_rate = DataRate::KilobitsPerSec(64);
+ audio_stream_config.encoder.allocate_bitrate = true;
+ audio_stream_config.stream.in_bandwidth_estimation = true;
+ s.CreateAudioStream(route->forward(), audio_stream_config);
+ s.CreateAudioStream(route->reverse(), audio_stream_config);
+
+ VideoStreamConfig video_stream_config;
+ auto video = s.CreateVideoStream(route->forward(), video_stream_config);
+ s.CreateVideoStream(route->reverse(), video_stream_config);
+
+ // Run for 10 seconds.
+ s.RunFor(TimeDelta::Seconds(10));
+ // Make sure retransmissions have happened.
+ int retransmit_packets = 0;
+
+ VideoSendStream::Stats stats;
+ alice->SendTask([&]() { stats = video->send()->GetStats(); });
+
+ for (const auto& substream : stats.substreams) {
+ retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
+ }
+ EXPECT_GT(retransmit_packets, 0);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/stats_collection.cc b/third_party/libwebrtc/test/scenario/stats_collection.cc
new file mode 100644
index 0000000000..e32696de71
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection.cc
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/scenario/stats_collection.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/memory_usage.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace test {
+
+VideoQualityAnalyzer::VideoQualityAnalyzer(
+ VideoQualityAnalyzerConfig config,
+ std::unique_ptr<RtcEventLogOutput> writer)
+ : config_(config), writer_(std::move(writer)) {
+ if (writer_) {
+ PrintHeaders();
+ }
+}
+
+VideoQualityAnalyzer::~VideoQualityAnalyzer() = default;
+
+void VideoQualityAnalyzer::PrintHeaders() {
+ writer_->Write(
+ "capture_time render_time capture_width capture_height render_width "
+ "render_height psnr\n");
+}
+
+std::function<void(const VideoFramePair&)> VideoQualityAnalyzer::Handler() {
+ return [this](VideoFramePair pair) { HandleFramePair(pair); };
+}
+
+void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample, double psnr) {
+ layer_analyzers_[sample.layer_id].HandleFramePair(sample, psnr,
+ writer_.get());
+ cached_.reset();
+}
+
+void VideoQualityAnalyzer::HandleFramePair(VideoFramePair sample) {
+ double psnr = NAN;
+ if (sample.decoded)
+ psnr = I420PSNR(*sample.captured->ToI420(), *sample.decoded->ToI420());
+
+ if (config_.thread) {
+ config_.thread->PostTask(
+ [this, sample, psnr] { HandleFramePair(std::move(sample), psnr); });
+ } else {
+ HandleFramePair(std::move(sample), psnr);
+ }
+}
+
+std::vector<VideoQualityStats> VideoQualityAnalyzer::layer_stats() const {
+ std::vector<VideoQualityStats> res;
+ for (auto& layer : layer_analyzers_)
+ res.push_back(layer.second.stats_);
+ return res;
+}
+
+VideoQualityStats& VideoQualityAnalyzer::stats() {
+ if (!cached_) {
+ cached_ = VideoQualityStats();
+ for (auto& layer : layer_analyzers_)
+ cached_->AddStats(layer.second.stats_);
+ }
+ return *cached_;
+}
+
+void VideoLayerAnalyzer::HandleFramePair(VideoFramePair sample,
+ double psnr,
+ RtcEventLogOutput* writer) {
+ RTC_CHECK(sample.captured);
+ HandleCapturedFrame(sample);
+ if (!sample.decoded) {
+ // Can only happen at the beginning of a call or if the resolution is
+ // reduced. Otherwise we will detect a freeze.
+ ++stats_.lost_count;
+ ++skip_count_;
+ } else {
+ stats_.psnr_with_freeze.AddSample(psnr);
+ if (sample.repeated) {
+ ++stats_.freeze_count;
+ ++skip_count_;
+ } else {
+ stats_.psnr.AddSample(psnr);
+ HandleRenderedFrame(sample);
+ }
+ }
+ if (writer) {
+ LogWriteFormat(writer, "%.3f %.3f %.3f %i %i %i %i %.3f\n",
+ sample.capture_time.seconds<double>(),
+ sample.render_time.seconds<double>(),
+ sample.captured->width(), sample.captured->height(),
+ sample.decoded ? sample.decoded->width() : 0,
+ sample.decoded ? sample.decoded->height() : 0, psnr);
+ }
+}
+
+void VideoLayerAnalyzer::HandleCapturedFrame(const VideoFramePair& sample) {
+ stats_.capture.AddFrameInfo(*sample.captured, sample.capture_time);
+ if (last_freeze_time_.IsInfinite())
+ last_freeze_time_ = sample.capture_time;
+}
+
+void VideoLayerAnalyzer::HandleRenderedFrame(const VideoFramePair& sample) {
+ stats_.capture_to_decoded_delay.AddSample(sample.decoded_time -
+ sample.capture_time);
+ stats_.end_to_end_delay.AddSample(sample.render_time - sample.capture_time);
+ stats_.render.AddFrameInfo(*sample.decoded, sample.render_time);
+ stats_.skipped_between_rendered.AddSample(skip_count_);
+ skip_count_ = 0;
+
+ if (last_render_time_.IsFinite()) {
+ RTC_DCHECK(sample.render_time.IsFinite());
+ TimeDelta render_interval = sample.render_time - last_render_time_;
+ TimeDelta mean_interval = stats_.render.frames.interval().Mean();
+ if (render_interval > TimeDelta::Millis(150) + mean_interval ||
+ render_interval > 3 * mean_interval) {
+ stats_.freeze_duration.AddSample(render_interval);
+ stats_.time_between_freezes.AddSample(last_render_time_ -
+ last_freeze_time_);
+ last_freeze_time_ = sample.render_time;
+ }
+ }
+ last_render_time_ = sample.render_time;
+}
+
+void CallStatsCollector::AddStats(Call::Stats sample) {
+ if (sample.send_bandwidth_bps > 0)
+ stats_.target_rate.AddSampleBps(sample.send_bandwidth_bps);
+ if (sample.pacer_delay_ms > 0)
+ stats_.pacer_delay.AddSample(TimeDelta::Millis(sample.pacer_delay_ms));
+ if (sample.rtt_ms > 0)
+ stats_.round_trip_time.AddSample(TimeDelta::Millis(sample.rtt_ms));
+ stats_.memory_usage.AddSample(rtc::GetProcessResidentSizeBytes());
+}
+
+void AudioReceiveStatsCollector::AddStats(
+ AudioReceiveStreamInterface::Stats sample) {
+ stats_.expand_rate.AddSample(sample.expand_rate);
+ stats_.accelerate_rate.AddSample(sample.accelerate_rate);
+ stats_.jitter_buffer.AddSampleMs(sample.jitter_buffer_ms);
+}
+
+void VideoSendStatsCollector::AddStats(VideoSendStream::Stats sample,
+ Timestamp at_time) {
+ // It's not certain that we yet have estimates for any of these stats.
+ // Check that they are positive before mixing them in.
+ if (sample.encode_frame_rate <= 0)
+ return;
+
+ stats_.encode_frame_rate.AddSample(sample.encode_frame_rate);
+ stats_.encode_time.AddSampleMs(sample.avg_encode_time_ms);
+ stats_.encode_usage.AddSample(sample.encode_usage_percent / 100.0);
+ stats_.media_bitrate.AddSampleBps(sample.media_bitrate_bps);
+
+ size_t fec_bytes = 0;
+ for (const auto& kv : sample.substreams) {
+ fec_bytes += kv.second.rtp_stats.fec.payload_bytes +
+ kv.second.rtp_stats.fec.padding_bytes;
+ }
+ if (last_update_.IsFinite()) {
+ auto fec_delta = DataSize::Bytes(fec_bytes - last_fec_bytes_);
+ auto time_delta = at_time - last_update_;
+ stats_.fec_bitrate.AddSample(fec_delta / time_delta);
+ }
+ last_fec_bytes_ = fec_bytes;
+ last_update_ = at_time;
+}
+
+void VideoReceiveStatsCollector::AddStats(
+ VideoReceiveStreamInterface::Stats sample) {
+ if (sample.decode_ms > 0)
+ stats_.decode_time.AddSampleMs(sample.decode_ms);
+ if (sample.max_decode_ms > 0)
+ stats_.decode_time_max.AddSampleMs(sample.max_decode_ms);
+ if (sample.width > 0 && sample.height > 0) {
+ stats_.decode_pixels.AddSample(sample.width * sample.height);
+ stats_.resolution.AddSample(sample.height);
+ }
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/stats_collection.h b/third_party/libwebrtc/test/scenario/stats_collection.h
new file mode 100644
index 0000000000..1f5d8daea7
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_STATS_COLLECTION_H_
+#define TEST_SCENARIO_STATS_COLLECTION_H_
+
+#include <map>
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "call/call.h"
+#include "rtc_base/thread.h"
+#include "test/logging/log_writer.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+
+struct VideoQualityAnalyzerConfig {
+ double psnr_coverage = 1;
+ rtc::Thread* thread = nullptr;
+};
+
+class VideoLayerAnalyzer {
+ public:
+ void HandleCapturedFrame(const VideoFramePair& sample);
+ void HandleRenderedFrame(const VideoFramePair& sample);
+ void HandleFramePair(VideoFramePair sample,
+ double psnr,
+ RtcEventLogOutput* writer);
+ VideoQualityStats stats_;
+ Timestamp last_capture_time_ = Timestamp::MinusInfinity();
+ Timestamp last_render_time_ = Timestamp::MinusInfinity();
+ Timestamp last_freeze_time_ = Timestamp::MinusInfinity();
+ int skip_count_ = 0;
+};
+
+class VideoQualityAnalyzer {
+ public:
+ explicit VideoQualityAnalyzer(
+ VideoQualityAnalyzerConfig config = VideoQualityAnalyzerConfig(),
+ std::unique_ptr<RtcEventLogOutput> writer = nullptr);
+ ~VideoQualityAnalyzer();
+ void HandleFramePair(VideoFramePair sample);
+ std::vector<VideoQualityStats> layer_stats() const;
+ VideoQualityStats& stats();
+ void PrintHeaders();
+ void PrintFrameInfo(const VideoFramePair& sample);
+ std::function<void(const VideoFramePair&)> Handler();
+
+ private:
+ void HandleFramePair(VideoFramePair sample, double psnr);
+ const VideoQualityAnalyzerConfig config_;
+ std::map<int, VideoLayerAnalyzer> layer_analyzers_;
+ const std::unique_ptr<RtcEventLogOutput> writer_;
+ absl::optional<VideoQualityStats> cached_;
+};
+
+class CallStatsCollector {
+ public:
+ void AddStats(Call::Stats sample);
+ CollectedCallStats& stats() { return stats_; }
+
+ private:
+ CollectedCallStats stats_;
+};
+class AudioReceiveStatsCollector {
+ public:
+ void AddStats(AudioReceiveStreamInterface::Stats sample);
+ CollectedAudioReceiveStats& stats() { return stats_; }
+
+ private:
+ CollectedAudioReceiveStats stats_;
+};
+class VideoSendStatsCollector {
+ public:
+ void AddStats(VideoSendStream::Stats sample, Timestamp at_time);
+ CollectedVideoSendStats& stats() { return stats_; }
+
+ private:
+ CollectedVideoSendStats stats_;
+ Timestamp last_update_ = Timestamp::MinusInfinity();
+ size_t last_fec_bytes_ = 0;
+};
+class VideoReceiveStatsCollector {
+ public:
+ void AddStats(VideoReceiveStreamInterface::Stats sample);
+ CollectedVideoReceiveStats& stats() { return stats_; }
+
+ private:
+ CollectedVideoReceiveStats stats_;
+};
+
+struct CallStatsCollectors {
+ CallStatsCollector call;
+ AudioReceiveStatsCollector audio_receive;
+ VideoSendStatsCollector video_send;
+ VideoReceiveStatsCollector video_receive;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_STATS_COLLECTION_H_
diff --git a/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc
new file mode 100644
index 0000000000..9f46f10073
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/stats_collection_unittest.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/stats_collection.h"
+
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+void CreateAnalyzedStream(Scenario* s,
+ NetworkSimulationConfig network_config,
+ VideoQualityAnalyzer* analyzer,
+ CallStatsCollectors* collectors) {
+ VideoStreamConfig config;
+ config.encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ config.encoder.implementation =
+ VideoStreamConfig::Encoder::Implementation::kSoftware;
+ config.hooks.frame_pair_handlers = {analyzer->Handler()};
+ auto* caller = s->CreateClient("caller", CallClientConfig());
+ auto* callee = s->CreateClient("callee", CallClientConfig());
+ auto route =
+ s->CreateRoutes(caller, {s->CreateSimulationNode(network_config)}, callee,
+ {s->CreateSimulationNode(NetworkSimulationConfig())});
+ VideoStreamPair* video = s->CreateVideoStream(route->forward(), config);
+ auto* audio = s->CreateAudioStream(route->forward(), AudioStreamConfig());
+ s->Every(TimeDelta::Seconds(1), [=] {
+ collectors->call.AddStats(caller->GetStats());
+
+ VideoSendStream::Stats send_stats;
+ caller->SendTask([&]() { send_stats = video->send()->GetStats(); });
+ collectors->video_send.AddStats(send_stats, s->Now());
+
+ AudioReceiveStreamInterface::Stats receive_stats;
+ caller->SendTask([&]() { receive_stats = audio->receive()->GetStats(); });
+ collectors->audio_receive.AddStats(receive_stats);
+
+ // Querying the video stats from within the expected runtime environment
+ // (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that
+ // we're currently on).
+ VideoReceiveStreamInterface::Stats video_receive_stats;
+ auto* video_stream = video->receive();
+ callee->SendTask([&video_stream, &video_receive_stats]() {
+ video_receive_stats = video_stream->GetStats();
+ });
+ collectors->video_receive.AddStats(video_receive_stats);
+ });
+}
+} // namespace
+
+TEST(ScenarioAnalyzerTest, PsnrIsHighWhenNetworkIsGood) {
+ VideoQualityAnalyzer analyzer;
+ CallStatsCollectors stats;
+ {
+ Scenario s;
+ NetworkSimulationConfig good_network;
+ good_network.bandwidth = DataRate::KilobitsPerSec(1000);
+ CreateAnalyzedStream(&s, good_network, &analyzer, &stats);
+ s.RunFor(TimeDelta::Seconds(3));
+ }
+ // This is a change detecting test, the targets are based on previous runs and
+ // might change due to changes in configuration and encoder etc. The main
+ // purpose is to show how the stats can be used. To avoid being overly
+ // sensitive to change, the ranges are chosen to be quite large.
+ EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 43, 10);
+ EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 700, 300);
+ EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 500, 200);
+ EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10);
+ EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 40, 20);
+}
+
+TEST(ScenarioAnalyzerTest, PsnrIsLowWhenNetworkIsBad) {
+ VideoQualityAnalyzer analyzer;
+ CallStatsCollectors stats;
+ {
+ Scenario s;
+ NetworkSimulationConfig bad_network;
+ bad_network.bandwidth = DataRate::KilobitsPerSec(100);
+ bad_network.loss_rate = 0.02;
+ CreateAnalyzedStream(&s, bad_network, &analyzer, &stats);
+ s.RunFor(TimeDelta::Seconds(3));
+ }
+ // This is a change detecting test, the targets are based on previous runs and
+ // might change due to changes in configuration and encoder etc.
+ EXPECT_NEAR(analyzer.stats().psnr_with_freeze.Mean(), 20, 10);
+ EXPECT_NEAR(stats.call.stats().target_rate.Mean().kbps(), 75, 50);
+ EXPECT_NEAR(stats.video_send.stats().media_bitrate.Mean().kbps(), 70, 30);
+ EXPECT_NEAR(stats.video_receive.stats().resolution.Mean(), 180, 10);
+ EXPECT_NEAR(stats.audio_receive.stats().jitter_buffer.Mean().ms(), 250, 200);
+}
+
+TEST(ScenarioAnalyzerTest, CountsCapturedButNotRendered) {
+ VideoQualityAnalyzer analyzer;
+ CallStatsCollectors stats;
+ {
+ Scenario s;
+ NetworkSimulationConfig long_delays;
+ long_delays.delay = TimeDelta::Seconds(5);
+ CreateAnalyzedStream(&s, long_delays, &analyzer, &stats);
+ // Enough time to send frames but not enough to deliver.
+ s.RunFor(TimeDelta::Millis(100));
+ }
+ EXPECT_GE(analyzer.stats().capture.count, 1);
+ EXPECT_EQ(analyzer.stats().render.count, 0);
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_frame_matcher.cc b/third_party/libwebrtc/test/scenario/video_frame_matcher.cc
new file mode 100644
index 0000000000..dc8cd59756
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_frame_matcher.cc
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/video_frame_matcher.h"
+
+#include <utility>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr int kThumbWidth = 96;
+constexpr int kThumbHeight = 96;
+} // namespace
+
+// The handlers are invoked once per matched capture/decode frame pair, on the
+// internal analysis task queue.
+VideoFrameMatcher::VideoFrameMatcher(
+    std::vector<std::function<void(const VideoFramePair&)> >
+        frame_pair_handlers)
+    : frame_pair_handlers_(std::move(frame_pair_handlers)),
+      task_queue_("VideoAnalyzer") {}
+
+VideoFrameMatcher::~VideoFrameMatcher() {
+  // Flush any still-unmatched captured frames before destruction; SendTask
+  // blocks until Finalize() has completed on the analysis queue.
+  task_queue_.SendTask([this] { Finalize(); });
+}
+
+void VideoFrameMatcher::RegisterLayer(int layer_id) {
+  // Layer state is only ever touched on the task queue, so creation of the
+  // per-layer entry is posted there as well.
+  task_queue_.PostTask([this, layer_id] { layers_[layer_id] = VideoLayer(); });
+}
+
+// Snapshots the captured frame (plus a downscaled thumbnail used for cheap
+// SSE comparisons) and queues it for matching against decoded frames on the
+// analysis task queue. Safe to call from any thread.
+void VideoFrameMatcher::OnCapturedFrame(const VideoFrame& frame,
+                                        Timestamp at_time) {
+  CapturedFrame captured;
+  captured.id = next_capture_id_++;
+  captured.capture_time = at_time;
+  captured.frame = frame.video_frame_buffer();
+  // Fixed: this statement previously ended with a comma operator (","),
+  // silently chaining the PostTask call into the same expression statement.
+  captured.thumb = ScaleVideoFrameBuffer(*frame.video_frame_buffer()->ToI420(),
+                                         kThumbWidth, kThumbHeight);
+  task_queue_.PostTask([this, captured]() {
+    for (auto& layer : layers_) {
+      CapturedFrame copy = captured;
+      // Seed the best match with the layer's last decoded frame, unless that
+      // decode is larger than the capture (captures never match bigger
+      // decodes).
+      if (layer.second.last_decode &&
+          layer.second.last_decode->frame->width() <= captured.frame->width()) {
+        copy.best_score = I420SSE(*captured.thumb->GetI420(),
+                                  *layer.second.last_decode->thumb->GetI420());
+        copy.best_decode = layer.second.last_decode;
+      }
+      layer.second.captured_frames.push_back(std::move(copy));
+    }
+  });
+}
+
+// Snapshots a decoded frame (and thumbnail) and updates the matching state
+// for `layer_id` on the analysis task queue. Safe to call from any thread.
+void VideoFrameMatcher::OnDecodedFrame(const VideoFrame& frame,
+                                       int layer_id,
+                                       Timestamp render_time,
+                                       Timestamp at_time) {
+  rtc::scoped_refptr<DecodedFrame> decoded(new DecodedFrame{});
+  decoded->decoded_time = at_time;
+  decoded->render_time = render_time;
+  decoded->frame = frame.video_frame_buffer();
+  decoded->thumb = ScaleVideoFrameBuffer(*frame.video_frame_buffer()->ToI420(),
+                                         kThumbWidth, kThumbHeight);
+
+  task_queue_.PostTask([this, decoded, layer_id] {
+    auto& layer = layers_[layer_id];
+    decoded->id = layer.next_decoded_id++;
+    layer.last_decode = decoded;
+    for (auto& captured : layer.captured_frames) {
+      // We can't match with a smaller capture.
+      if (captured.frame->width() < decoded->frame->width()) {
+        captured.matched = true;
+        continue;
+      }
+      double score =
+          I420SSE(*captured.thumb->GetI420(), *decoded->thumb->GetI420());
+      if (score < captured.best_score) {
+        // New best candidate; keep the capture pending in case an even
+        // better decode arrives later.
+        captured.best_score = score;
+        captured.best_decode = decoded;
+        captured.matched = false;
+      } else {
+        // Score got worse, so the previous best decode is the final match.
+        captured.matched = true;
+      }
+    }
+    // Emit finished matches in capture order from the front of the queue.
+    while (!layer.captured_frames.empty() &&
+           layer.captured_frames.front().matched) {
+      HandleMatch(std::move(layer.captured_frames.front()), layer_id);
+      layer.captured_frames.pop_front();
+    }
+  });
+}
+
+// The matcher is active only if frame pair handlers were registered; callers
+// use this to skip frame tapping entirely when analysis is disabled.
+bool VideoFrameMatcher::Active() const {
+  return !frame_pair_handlers_.empty();
+}
+
+// Builds a VideoFramePair from a finished capture and forwards it to every
+// registered handler. Runs on the analysis task queue. `best_decode` may be
+// null (the captured frame was never decoded, e.g. dropped).
+void VideoFrameMatcher::HandleMatch(VideoFrameMatcher::CapturedFrame captured,
+                                    int layer_id) {
+  VideoFramePair frame_pair;
+  frame_pair.layer_id = layer_id;
+  frame_pair.captured = captured.frame;
+  frame_pair.capture_id = captured.id;
+  frame_pair.capture_time = captured.capture_time;
+  if (captured.best_decode) {
+    frame_pair.decode_id = captured.best_decode->id;
+    frame_pair.decoded = captured.best_decode->frame;
+    frame_pair.decoded_time = captured.best_decode->decoded_time;
+    // We can't render frames before they have been decoded.
+    frame_pair.render_time = std::max(captured.best_decode->render_time,
+                                      captured.best_decode->decoded_time);
+    frame_pair.repeated = captured.best_decode->repeat_count++;
+  }
+  for (auto& handler : frame_pair_handlers_)
+    handler(frame_pair);
+}
+
+// Drains all still-pending captures (called from the destructor), pairing
+// each with its current best decode, if any.
+void VideoFrameMatcher::Finalize() {
+  for (auto& layer : layers_) {
+    while (!layer.second.captured_frames.empty()) {
+      HandleMatch(std::move(layer.second.captured_frames.front()), layer.first);
+      layer.second.captured_frames.pop_front();
+    }
+  }
+}
+
+CapturedFrameTap::CapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher)
+    : clock_(clock), matcher_(matcher) {}
+
+// Forwards every captured frame to the matcher, stamped with the current
+// clock time.
+void CapturedFrameTap::OnFrame(const VideoFrame& frame) {
+  matcher_->OnCapturedFrame(frame, clock_->CurrentTime());
+}
+void CapturedFrameTap::OnDiscardedFrame() {
+  discarded_count_++;
+}
+
+ForwardingCapturedFrameTap::ForwardingCapturedFrameTap(
+    Clock* clock,
+    VideoFrameMatcher* matcher,
+    rtc::VideoSourceInterface<VideoFrame>* source)
+    : clock_(clock), matcher_(matcher), source_(source) {}
+
+// Reports the frame to the matcher, then forwards it to the attached sink.
+// A sink must have been attached via AddOrUpdateSink() first (CHECKed).
+void ForwardingCapturedFrameTap::OnFrame(const VideoFrame& frame) {
+  RTC_CHECK(sink_);
+  matcher_->OnCapturedFrame(frame, clock_->CurrentTime());
+  sink_->OnFrame(frame);
+}
+void ForwardingCapturedFrameTap::OnDiscardedFrame() {
+  RTC_CHECK(sink_);
+  discarded_count_++;
+  sink_->OnDiscardedFrame();
+}
+
+// Inserts this tap between `source_` and `sink`. Only a single sink is
+// supported; the DCHECK enforces the same sink is reused on updates.
+void ForwardingCapturedFrameTap::AddOrUpdateSink(
+    VideoSinkInterface<VideoFrame>* sink,
+    const rtc::VideoSinkWants& wants) {
+  if (!sink_)
+    sink_ = sink;
+  RTC_DCHECK_EQ(sink_, sink);
+  source_->AddOrUpdateSink(this, wants);
+}
+// Detaches the tap from the source and clears the sink; any frame delivered
+// after this would hit the RTC_CHECK in OnFrame.
+void ForwardingCapturedFrameTap::RemoveSink(
+    VideoSinkInterface<VideoFrame>* sink) {
+  source_->RemoveSink(this);
+  sink_ = nullptr;
+}
+
+DecodedFrameTap::DecodedFrameTap(Clock* clock,
+                                 VideoFrameMatcher* matcher,
+                                 int layer_id)
+    : clock_(clock), matcher_(matcher), layer_id_(layer_id) {
+  // Ensure the matcher has per-layer state before the first decoded frame
+  // arrives.
+  matcher_->RegisterLayer(layer_id_);
+}
+
+// Reports each decoded frame to the matcher, using the frame's embedded
+// render time and the current clock time as the decode time.
+void DecodedFrameTap::OnFrame(const VideoFrame& frame) {
+  matcher_->OnDecodedFrame(frame, layer_id_,
+                           Timestamp::Millis(frame.render_time_ms()),
+                           clock_->CurrentTime());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_frame_matcher.h b/third_party/libwebrtc/test/scenario/video_frame_matcher.h
new file mode 100644
index 0000000000..a3aa85447d
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_frame_matcher.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
+#define TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/units/timestamp.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_sink_interface.h"
+#include "api/video/video_source_interface.h"
+#include "rtc_base/ref_counted_object.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "system_wrappers/include/clock.h"
+#include "test/scenario/performance_stats.h"
+
+namespace webrtc {
+namespace test {
+
+// Matches captured frames with decoded frames (per layer) by comparing
+// downscaled thumbnails, and reports each resulting VideoFramePair to the
+// registered handlers. All matching state is confined to an internal task
+// queue, so the public methods may be called from any thread.
+class VideoFrameMatcher {
+ public:
+  explicit VideoFrameMatcher(
+      std::vector<std::function<void(const VideoFramePair&)>>
+          frame_pair_handlers);
+  ~VideoFrameMatcher();
+  void RegisterLayer(int layer_id);
+  void OnCapturedFrame(const VideoFrame& frame, Timestamp at_time);
+  void OnDecodedFrame(const VideoFrame& frame,
+                      int layer_id,
+                      Timestamp render_time,
+                      Timestamp at_time);
+  // True when at least one frame pair handler is registered.
+  bool Active() const;
+
+ private:
+  // A decoded frame candidate; ref-counted because several captured frames
+  // may reference the same decode as their best match.
+  struct DecodedFrameBase {
+    int id;
+    Timestamp decoded_time = Timestamp::PlusInfinity();
+    Timestamp render_time = Timestamp::PlusInfinity();
+    rtc::scoped_refptr<VideoFrameBuffer> frame;
+    rtc::scoped_refptr<VideoFrameBuffer> thumb;
+    int repeat_count = 0;
+  };
+  using DecodedFrame = rtc::FinalRefCountedObject<DecodedFrameBase>;
+  // A captured frame awaiting its best-matching decoded frame.
+  struct CapturedFrame {
+    int id;
+    Timestamp capture_time = Timestamp::PlusInfinity();
+    rtc::scoped_refptr<VideoFrameBuffer> frame;
+    rtc::scoped_refptr<VideoFrameBuffer> thumb;
+    double best_score = INFINITY;
+    rtc::scoped_refptr<DecodedFrame> best_decode;
+    bool matched = false;
+  };
+  // Per-layer matching state; accessed only on task_queue_.
+  struct VideoLayer {
+    int layer_id;
+    std::deque<CapturedFrame> captured_frames;
+    rtc::scoped_refptr<DecodedFrame> last_decode;
+    int next_decoded_id = 1;
+  };
+  void HandleMatch(CapturedFrame captured, int layer_id);
+  void Finalize();
+  int next_capture_id_ = 1;
+  std::vector<std::function<void(const VideoFramePair&)>> frame_pair_handlers_;
+  std::map<int, VideoLayer> layers_;
+  TaskQueueForTest task_queue_;
+};
+
+// Sink that reports captured frames to a VideoFrameMatcher and counts
+// discarded frames, without forwarding frames anywhere else.
+class CapturedFrameTap : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  CapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher);
+  CapturedFrameTap(CapturedFrameTap&) = delete;
+  CapturedFrameTap& operator=(CapturedFrameTap&) = delete;
+
+  void OnFrame(const VideoFrame& frame) override;
+  void OnDiscardedFrame() override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  int discarded_count_ = 0;
+};
+
+// Transparent source/sink adapter inserted between a capturer and a send
+// stream: reports every frame to the matcher, then forwards it to the single
+// downstream sink.
+class ForwardingCapturedFrameTap
+    : public rtc::VideoSinkInterface<VideoFrame>,
+      public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+  ForwardingCapturedFrameTap(Clock* clock,
+                             VideoFrameMatcher* matcher,
+                             rtc::VideoSourceInterface<VideoFrame>* source);
+  ForwardingCapturedFrameTap(ForwardingCapturedFrameTap&) = delete;
+  ForwardingCapturedFrameTap& operator=(ForwardingCapturedFrameTap&) = delete;
+
+  // VideoSinkInterface interface
+  void OnFrame(const VideoFrame& frame) override;
+  void OnDiscardedFrame() override;
+
+  // VideoSourceInterface interface
+  void AddOrUpdateSink(VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(VideoSinkInterface<VideoFrame>* sink) override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  rtc::VideoSourceInterface<VideoFrame>* const source_;
+  // Single downstream sink; null until AddOrUpdateSink() is called.
+  VideoSinkInterface<VideoFrame>* sink_ = nullptr;
+  int discarded_count_ = 0;
+};
+
+// Sink attached to a receive stream's renderer path that reports decoded
+// frames for one layer to the matcher.
+class DecodedFrameTap : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+  DecodedFrameTap(Clock* clock, VideoFrameMatcher* matcher, int layer_id);
+  // VideoSinkInterface interface
+  void OnFrame(const VideoFrame& frame) override;
+
+ private:
+  Clock* const clock_;
+  VideoFrameMatcher* const matcher_;
+  int layer_id_;
+};
+} // namespace test
+} // namespace webrtc
+#endif // TEST_SCENARIO_VIDEO_FRAME_MATCHER_H_
diff --git a/third_party/libwebrtc/test/scenario/video_stream.cc b/third_party/libwebrtc/test/scenario/video_stream.cc
new file mode 100644
index 0000000000..8d627d8893
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream.cc
@@ -0,0 +1,636 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/scenario/video_stream.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include "absl/strings/match.h"
+#include "api/test/create_frame_generator.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/test/video/function_video_encoder_factory.h"
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "media/base/media_constants.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/webrtc_video_engine.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "test/call_test.h"
+#include "test/fake_encoder.h"
+#include "test/scenario/hardware_codecs.h"
+#include "test/testsupport/file_utils.h"
+#include "video/config/encoder_stream_factory.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+enum : int { // The first valid value is 1.
+ kTransportSequenceNumberExtensionId = 1,
+ kAbsSendTimeExtensionId,
+ kVideoContentTypeExtensionId,
+ kVideoRotationRtpExtensionId,
+};
+
+constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
+// Maps a codec type to the fixed RTP payload type number used by CallTest.
+uint8_t CodecTypeToPayloadType(VideoCodecType codec_type) {
+  switch (codec_type) {
+    case VideoCodecType::kVideoCodecGeneric:
+      return CallTest::kFakeVideoSendPayloadType;
+    case VideoCodecType::kVideoCodecVP8:
+      return CallTest::kPayloadTypeVP8;
+    case VideoCodecType::kVideoCodecVP9:
+      return CallTest::kPayloadTypeVP9;
+    case VideoCodecType::kVideoCodecH264:
+      return CallTest::kPayloadTypeH264;
+    default:
+      RTC_DCHECK_NOTREACHED();
+  }
+  return {};
+}
+// Maps a codec type to its cricket codec name; the generic (fake) codec has
+// no SDP name and yields an empty string.
+std::string CodecTypeToCodecName(VideoCodecType codec_type) {
+  switch (codec_type) {
+    case VideoCodecType::kVideoCodecGeneric:
+      return "";
+    case VideoCodecType::kVideoCodecVP8:
+      return cricket::kVp8CodecName;
+    case VideoCodecType::kVideoCodecVP9:
+      return cricket::kVp9CodecName;
+    case VideoCodecType::kVideoCodecH264:
+      return cricket::kH264CodecName;
+    default:
+      RTC_DCHECK_NOTREACHED();
+  }
+  return {};
+}
+// Translates the scenario-config content type to the encoder-config enum.
+VideoEncoderConfig::ContentType ConvertContentType(
+    VideoStreamConfig::Encoder::ContentType content_type) {
+  switch (content_type) {
+    case VideoStreamConfig::Encoder::ContentType::kVideo:
+      return VideoEncoderConfig::ContentType::kRealtimeVideo;
+    case VideoStreamConfig::Encoder::ContentType::kScreen:
+      return VideoEncoderConfig::ContentType::kScreen;
+  }
+}
+
+// Maps a scenario-config file path to a usable path:
+//  - "name" (no extension)  -> the test resource "name" with a ".yuv" ext.
+//  - "res://name.ext"       -> the test resource "name" with extension "ext".
+//  - anything else          -> returned unchanged.
+std::string TransformFilePath(std::string path) {
+  static const std::string resource_prefix = "res://";
+  // rfind() returns size_t; the previous int/`< 0` comparison relied on the
+  // implementation-defined npos->int conversion.
+  size_t ext_pos = path.rfind('.');
+  if (ext_pos == std::string::npos) {
+    return test::ResourcePath(path, "yuv");
+  } else if (absl::StartsWith(path, resource_prefix)) {
+    // Extract the bare name between the prefix and the dot, and the extension
+    // after the dot. The previous code passed `ext_pos` as a substring
+    // *length*, which left the extension inside `name`, and kept the leading
+    // '.' in `ext`, producing a malformed resource path.
+    std::string name = path.substr(resource_prefix.length(),
+                                   ext_pos - resource_prefix.length());
+    std::string ext = path.substr(ext_pos + 1);
+    return test::ResourcePath(name, ext);
+  }
+  return path;
+}
+
+// Builds a VideoSendStream::Config from a scenario config: payload type/name,
+// NACK history, SSRCs, RTP extensions and optional RTX/FlexFEC/ULPFEC.
+VideoSendStream::Config CreateVideoSendStreamConfig(
+    VideoStreamConfig config,
+    std::vector<uint32_t> ssrcs,
+    std::vector<uint32_t> rtx_ssrcs,
+    Transport* send_transport) {
+  VideoSendStream::Config send_config(send_transport);
+  send_config.rtp.payload_name = CodecTypeToPayloadString(config.encoder.codec);
+  send_config.rtp.payload_type = CodecTypeToPayloadType(config.encoder.codec);
+  send_config.rtp.nack.rtp_history_ms =
+      config.stream.nack_history_time.ms<int>();
+
+  send_config.rtp.ssrcs = ssrcs;
+  send_config.rtp.extensions = GetVideoRtpExtensions(config);
+
+  if (config.stream.use_rtx) {
+    send_config.rtp.rtx.payload_type = CallTest::kSendRtxPayloadType;
+    send_config.rtp.rtx.ssrcs = rtx_ssrcs;
+  }
+  if (config.stream.use_flexfec) {
+    send_config.rtp.flexfec.payload_type = CallTest::kFlexfecPayloadType;
+    send_config.rtp.flexfec.ssrc = CallTest::kFlexfecSendSsrc;
+    // FlexFEC protects the primary media SSRCs.
+    send_config.rtp.flexfec.protected_media_ssrcs = ssrcs;
+  }
+  if (config.stream.use_ulpfec) {
+    send_config.rtp.ulpfec.red_payload_type = CallTest::kRedPayloadType;
+    send_config.rtp.ulpfec.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
+    send_config.rtp.ulpfec.red_rtx_payload_type = CallTest::kRtxRedPayloadType;
+  }
+  return send_config;
+}
+// Derives VP9-specific encoder settings (temporal/spatial layers, inter-layer
+// prediction, denoising/resize) from the first simulcast stream's scalability
+// mode and the content type.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateVp9SpecificSettings(VideoStreamConfig video_config) {
+  constexpr auto kScreen = VideoStreamConfig::Encoder::ContentType::kScreen;
+  VideoStreamConfig::Encoder conf = video_config.encoder;
+  VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings();
+  // TODO(bugs.webrtc.org/11607): Support separate scalability mode per
+  // simulcast stream.
+  ScalabilityMode scalability_mode = conf.simulcast_streams[0];
+  vp9.keyFrameInterval = conf.key_frame_interval.value_or(0);
+  vp9.numberOfTemporalLayers =
+      ScalabilityModeToNumTemporalLayers(scalability_mode);
+  vp9.numberOfSpatialLayers =
+      ScalabilityModeToNumSpatialLayers(scalability_mode);
+  vp9.interLayerPred = ScalabilityModeToInterLayerPredMode(scalability_mode);
+
+  if (conf.content_type == kScreen &&
+      (video_config.source.framerate > 5 || vp9.numberOfSpatialLayers >= 3)) {
+    vp9.flexibleMode = true;
+  }
+
+  // Denoising and automatic resize are only enabled for single-layer
+  // real-time video; layered or screenshare encodes disable both.
+  if (conf.content_type == kScreen || vp9.numberOfTemporalLayers > 1 ||
+      vp9.numberOfSpatialLayers > 1) {
+    vp9.automaticResizeOn = false;
+    vp9.denoisingOn = false;
+  } else {
+    vp9.automaticResizeOn = conf.single.automatic_scaling;
+    vp9.denoisingOn = conf.single.denoising;
+  }
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+      vp9);
+}
+
+// Derives VP8-specific encoder settings, mirroring the VP9 logic above.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateVp8SpecificSettings(VideoStreamConfig config) {
+  VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
+  vp8_settings.keyFrameInterval = config.encoder.key_frame_interval.value_or(0);
+  // TODO(bugs.webrtc.org/11607): Support separate scalability mode per
+  // simulcast stream.
+  ScalabilityMode scalability_mode = config.encoder.simulcast_streams[0];
+  vp8_settings.numberOfTemporalLayers =
+      ScalabilityModeToNumTemporalLayers(scalability_mode);
+  if (vp8_settings.numberOfTemporalLayers > 1 ||
+      config.encoder.simulcast_streams.size() > 1) {
+    vp8_settings.automaticResizeOn = false;
+    vp8_settings.denoisingOn = false;
+  } else {
+    vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling;
+    vp8_settings.denoisingOn = config.encoder.single.denoising;
+  }
+  return rtc::make_ref_counted<VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+      vp8_settings);
+}
+
+// H264 has no codec-specific settings here; only validates that the config
+// requests a single L1T1 stream and no key frame interval.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateH264SpecificSettings(VideoStreamConfig config) {
+  RTC_DCHECK_EQ(config.encoder.simulcast_streams.size(), 1);
+  RTC_DCHECK(config.encoder.simulcast_streams[0] == ScalabilityMode::kL1T1);
+  // TODO(bugs.webrtc.org/6883): Set a key frame interval as a setting that
+  // isn't codec specific.
+  RTC_CHECK_EQ(0, config.encoder.key_frame_interval.value_or(0));
+  return nullptr;
+}
+
+// Dispatches to the per-codec settings factory; codecs without specific
+// settings return null.
+rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
+CreateEncoderSpecificSettings(VideoStreamConfig config) {
+  using Codec = VideoStreamConfig::Encoder::Codec;
+  switch (config.encoder.codec) {
+    case Codec::kVideoCodecH264:
+      return CreateH264SpecificSettings(config);
+    case Codec::kVideoCodecVP8:
+      return CreateVp8SpecificSettings(config);
+    case Codec::kVideoCodecVP9:
+      return CreateVp9SpecificSettings(config);
+    case Codec::kVideoCodecGeneric:
+    case Codec::kVideoCodecAV1:
+      return nullptr;
+    case Codec::kVideoCodecMultiplex:
+      RTC_DCHECK_NOTREACHED();
+      return nullptr;
+  }
+}
+
+// Builds the VideoEncoderConfig (stream factory, bitrates, simulcast layers,
+// codec-specific settings) from a scenario config.
+VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) {
+  webrtc::VideoEncoder::EncoderInfo encoder_info;
+  VideoEncoderConfig encoder_config;
+  encoder_config.codec_type = config.encoder.codec;
+  encoder_config.content_type = ConvertContentType(config.encoder.content_type);
+  encoder_config.video_format =
+      SdpVideoFormat(CodecTypeToPayloadString(config.encoder.codec), {});
+
+  encoder_config.number_of_streams = config.encoder.simulcast_streams.size();
+  encoder_config.simulcast_layers =
+      std::vector<VideoStream>(encoder_config.number_of_streams);
+  encoder_config.min_transmit_bitrate_bps = config.stream.pad_to_rate.bps();
+
+  // Known codecs use the cricket stream factory; the generic (fake) codec
+  // falls back to the default factory.
+  std::string cricket_codec = CodecTypeToCodecName(config.encoder.codec);
+  if (!cricket_codec.empty()) {
+    bool screenshare = config.encoder.content_type ==
+                       VideoStreamConfig::Encoder::ContentType::kScreen;
+    encoder_config.video_stream_factory =
+        rtc::make_ref_counted<cricket::EncoderStreamFactory>(
+            cricket_codec, kDefaultMaxQp, screenshare, screenshare,
+            encoder_info);
+  } else {
+    encoder_config.video_stream_factory =
+        rtc::make_ref_counted<DefaultVideoStreamFactory>();
+  }
+
+  // TODO(srte): Base this on encoder capabilities.
+  encoder_config.max_bitrate_bps =
+      config.encoder.max_data_rate.value_or(DataRate::KilobitsPerSec(10000))
+          .bps();
+
+  encoder_config.frame_drop_enabled = config.encoder.frame_dropping;
+  encoder_config.encoder_specific_settings =
+      CreateEncoderSpecificSettings(config);
+
+  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+    auto& layer = encoder_config.simulcast_layers[i];
+    if (config.encoder.max_framerate) {
+      layer.max_framerate = *config.encoder.max_framerate;
+      // NOTE(review): min_bitrate_bps is only assigned when max_framerate is
+      // also set, and min_data_rate is dereferenced without a has-value
+      // check — confirm both couplings are intentional.
+      layer.min_bitrate_bps = config.encoder.min_data_rate->bps_or(-1);
+    }
+    layer.scalability_mode = config.encoder.simulcast_streams[i];
+  }
+
+  return encoder_config;
+}
+
+// Builds a slide-show frame generator from YUV image files; if a crop is
+// configured, the generator scrolls across the images and pauses between
+// slide changes.
+std::unique_ptr<FrameGeneratorInterface> CreateImageSlideGenerator(
+    Clock* clock,
+    VideoStreamConfig::Source::Slides slides,
+    int framerate) {
+  std::vector<std::string> paths = slides.images.paths;
+  for (std::string& path : paths)
+    path = TransformFilePath(path);
+  if (slides.images.crop.width || slides.images.crop.height) {
+    // The remaining time of each slide interval, after scrolling, is spent
+    // paused on the slide; it must not be negative.
+    TimeDelta pause_duration =
+        slides.change_interval - slides.images.crop.scroll_duration;
+    RTC_CHECK_GE(pause_duration, TimeDelta::Zero());
+    int crop_width = slides.images.crop.width.value_or(slides.images.width);
+    int crop_height = slides.images.crop.height.value_or(slides.images.height);
+    RTC_CHECK_LE(crop_width, slides.images.width);
+    RTC_CHECK_LE(crop_height, slides.images.height);
+    return CreateScrollingInputFromYuvFilesFrameGenerator(
+        clock, paths, slides.images.width, slides.images.height, crop_width,
+        crop_height, slides.images.crop.scroll_duration.ms(),
+        pause_duration.ms());
+  } else {
+    // Without a crop, each image is simply repeated for one change interval.
+    return CreateFromYuvFileFrameGenerator(
+        paths, slides.images.width, slides.images.height,
+        slides.change_interval.seconds<double>() * framerate);
+  }
+}
+
+// Dispatches on the configured capture type to the matching generator
+// (synthetic squares, YUV file playback, or generated/image slides).
+std::unique_ptr<FrameGeneratorInterface> CreateFrameGenerator(
+    Clock* clock,
+    VideoStreamConfig::Source source) {
+  using Capture = VideoStreamConfig::Source::Capture;
+  switch (source.capture) {
+    case Capture::kGenerator:
+      return CreateSquareFrameGenerator(
+          source.generator.width, source.generator.height,
+          source.generator.pixel_format, /*num_squares*/ absl::nullopt);
+    case Capture::kVideoFile:
+      RTC_CHECK(source.video_file.width && source.video_file.height);
+      return CreateFromYuvFileFrameGenerator(
+          {TransformFilePath(source.video_file.name)}, source.video_file.width,
+          source.video_file.height, /*frame_repeat_count*/ 1);
+    case Capture::kGenerateSlides:
+      return CreateSlideFrameGenerator(
+          source.slides.generator.width, source.slides.generator.height,
+          source.slides.change_interval.seconds<double>() * source.framerate);
+    case Capture::kImageSlides:
+      return CreateImageSlideGenerator(clock, source.slides, source.framerate);
+  }
+}
+
+// Builds a VideoReceiveStreamInterface::Config matching the send side:
+// SSRCs, extensions, NACK, decoder, renderer and optional RTX/ULPFEC wiring.
+VideoReceiveStreamInterface::Config CreateVideoReceiveStreamConfig(
+    VideoStreamConfig config,
+    Transport* feedback_transport,
+    VideoDecoderFactory* decoder_factory,
+    VideoReceiveStreamInterface::Decoder decoder,
+    rtc::VideoSinkInterface<VideoFrame>* renderer,
+    uint32_t local_ssrc,
+    uint32_t ssrc,
+    uint32_t rtx_ssrc) {
+  VideoReceiveStreamInterface::Config recv(feedback_transport);
+  recv.rtp.local_ssrc = local_ssrc;
+  recv.rtp.extensions = GetVideoRtpExtensions(config);
+
+  // RTX requires a non-zero NACK history to be useful.
+  RTC_DCHECK(!config.stream.use_rtx ||
+             config.stream.nack_history_time > TimeDelta::Zero());
+  recv.rtp.nack.rtp_history_ms = config.stream.nack_history_time.ms();
+  recv.rtp.protected_by_flexfec = config.stream.use_flexfec;
+  recv.rtp.remote_ssrc = ssrc;
+  recv.decoder_factory = decoder_factory;
+  recv.decoders.push_back(decoder);
+  recv.renderer = renderer;
+  if (config.stream.use_rtx) {
+    recv.rtp.rtx_ssrc = rtx_ssrc;
+    recv.rtp.rtx_associated_payload_types[CallTest::kSendRtxPayloadType] =
+        CodecTypeToPayloadType(config.encoder.codec);
+  }
+  if (config.stream.use_ulpfec) {
+    recv.rtp.red_payload_type = CallTest::kRedPayloadType;
+    recv.rtp.ulpfec_payload_type = CallTest::kUlpfecPayloadType;
+    recv.rtp.rtx_associated_payload_types[CallTest::kRtxRedPayloadType] =
+        CallTest::kRedPayloadType;
+  }
+  recv.sync_group = config.render.sync_group;
+  return recv;
+}
+} // namespace
+
+// Returns the RTP header extensions used for video streams in scenario tests.
+// Content type and video rotation are always included; transport sequence
+// number and abs-send-time depend on the stream config.
+// NOTE(review): `config` is passed by const value; a const reference would
+// avoid the copy — confirm against the declaration in the header.
+std::vector<RtpExtension> GetVideoRtpExtensions(
+    const VideoStreamConfig config) {
+  std::vector<RtpExtension> res = {
+      RtpExtension(RtpExtension::kVideoContentTypeUri,
+                   kVideoContentTypeExtensionId),
+      RtpExtension(RtpExtension::kVideoRotationUri,
+                   kVideoRotationRtpExtensionId)};
+  if (config.stream.packet_feedback) {
+    res.push_back(RtpExtension(RtpExtension::kTransportSequenceNumberUri,
+                               kTransportSequenceNumberExtensionId));
+  }
+  if (config.stream.abs_send_time) {
+    res.push_back(
+        RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
+  }
+  return res;
+}
+
+// Sets up the capturer, encoder factory, SSRCs and the VideoSendStream on the
+// sender's call. When the matcher is active, a forwarding tap is inserted
+// between the capturer and the stream so captured frames can be analyzed.
+SendVideoStream::SendVideoStream(CallClient* sender,
+                                 VideoStreamConfig config,
+                                 Transport* send_transport,
+                                 VideoFrameMatcher* matcher)
+    : sender_(sender), config_(config) {
+  video_capturer_ = std::make_unique<FrameGeneratorCapturer>(
+      sender_->clock_, CreateFrameGenerator(sender_->clock_, config.source),
+      config.source.framerate,
+      *sender->time_controller_->GetTaskQueueFactory());
+  video_capturer_->Init();
+
+  using Encoder = VideoStreamConfig::Encoder;
+  using Codec = VideoStreamConfig::Encoder::Codec;
+  switch (config.encoder.implementation) {
+    case Encoder::Implementation::kFake:
+      // The factory lambda may be invoked later and concurrently with
+      // UpdateConfig(), hence the lock around config_/fake_encoders_.
+      encoder_factory_ =
+          std::make_unique<FunctionVideoEncoderFactory>([this]() {
+            MutexLock lock(&mutex_);
+            std::unique_ptr<FakeEncoder> encoder;
+            if (config_.encoder.codec == Codec::kVideoCodecVP8) {
+              encoder = std::make_unique<test::FakeVp8Encoder>(sender_->clock_);
+            } else if (config_.encoder.codec == Codec::kVideoCodecGeneric) {
+              encoder = std::make_unique<test::FakeEncoder>(sender_->clock_);
+            } else {
+              RTC_DCHECK_NOTREACHED();
+            }
+            // Keep a raw pointer so UpdateConfig() can adjust max bitrate on
+            // live fake encoders.
+            fake_encoders_.push_back(encoder.get());
+            if (config_.encoder.fake.max_rate.IsFinite())
+              encoder->SetMaxBitrate(config_.encoder.fake.max_rate.kbps());
+            return encoder;
+          });
+      break;
+    case VideoStreamConfig::Encoder::Implementation::kSoftware:
+      encoder_factory_.reset(new InternalEncoderFactory());
+      break;
+    case VideoStreamConfig::Encoder::Implementation::kHardware:
+      encoder_factory_ = CreateHardwareEncoderFactory();
+      break;
+  }
+  RTC_CHECK(encoder_factory_);
+
+  bitrate_allocator_factory_ = CreateBuiltinVideoBitrateAllocatorFactory();
+  RTC_CHECK(bitrate_allocator_factory_);
+
+  VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config);
+  // One primary and one RTX SSRC per simulcast stream.
+  for (size_t i = 0; i < encoder_config.number_of_streams; ++i) {
+    ssrcs_.push_back(sender->GetNextVideoSsrc());
+    rtx_ssrcs_.push_back(sender->GetNextRtxSsrc());
+  }
+  VideoSendStream::Config send_config =
+      CreateVideoSendStreamConfig(config, ssrcs_, rtx_ssrcs_, send_transport);
+  send_config.encoder_settings.encoder_factory = encoder_factory_.get();
+  send_config.encoder_settings.bitrate_allocator_factory =
+      bitrate_allocator_factory_.get();
+  send_config.suspend_below_min_bitrate =
+      config.encoder.suspend_below_min_bitrate;
+
+  // Stream creation must happen on the client's task queue.
+  sender_->SendTask([&] {
+    if (config.stream.fec_controller_factory) {
+      send_stream_ = sender_->call_->CreateVideoSendStream(
+          std::move(send_config), std::move(encoder_config),
+          config.stream.fec_controller_factory->CreateFecController());
+    } else {
+      send_stream_ = sender_->call_->CreateVideoSendStream(
+          std::move(send_config), std::move(encoder_config));
+    }
+
+    if (matcher->Active()) {
+      frame_tap_ = std::make_unique<ForwardingCapturedFrameTap>(
+          sender_->clock_, matcher, video_capturer_.get());
+      send_stream_->SetSource(frame_tap_.get(),
+                              config.encoder.degradation_preference);
+    } else {
+      send_stream_->SetSource(video_capturer_.get(),
+                              config.encoder.degradation_preference);
+    }
+  });
+}
+
+SendVideoStream::~SendVideoStream() {
+  sender_->SendTask(
+      [this] { sender_->call_->DestroyVideoSendStream(send_stream_); });
+}
+
+void SendVideoStream::Start() {
+  sender_->SendTask([this] {
+    send_stream_->Start();
+    // Starting also flips the video channel network state to up so packets
+    // actually flow.
+    sender_->call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+  });
+}
+
+void SendVideoStream::Stop() {
+  sender_->SendTask([this] { send_stream_->Stop(); });
+}
+
+// Applies `modifier` to the stream config under the lock and propagates the
+// relevant differences (fake-encoder max rate, encoder reconfiguration,
+// capture framerate) to the live objects.
+void SendVideoStream::UpdateConfig(
+    std::function<void(VideoStreamConfig*)> modifier) {
+  sender_->SendTask([&] {
+    MutexLock lock(&mutex_);
+    VideoStreamConfig prior_config = config_;
+    modifier(&config_);
+    if (prior_config.encoder.fake.max_rate != config_.encoder.fake.max_rate) {
+      for (auto* encoder : fake_encoders_) {
+        encoder->SetMaxBitrate(config_.encoder.fake.max_rate.kbps());
+      }
+    }
+    // TODO(srte): Add more conditions that should cause reconfiguration.
+    if (prior_config.encoder.max_framerate != config_.encoder.max_framerate ||
+        prior_config.encoder.max_data_rate != config_.encoder.max_data_rate) {
+      VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_);
+      send_stream_->ReconfigureVideoEncoder(std::move(encoder_config));
+    }
+    if (prior_config.source.framerate != config_.source.framerate) {
+      // NOTE(review): SetCaptureFramerate() issues another SendTask from
+      // inside this SendTask while mutex_ is held — confirm the task queue
+      // implementation tolerates this re-entrancy.
+      SetCaptureFramerate(config_.source.framerate);
+    }
+  });
+}
+
+// Asynchronously toggles simulcast layers: for VP8 the per-RTP-stream start
+// state is updated directly, and the encoder is reconfigured with the new
+// per-layer active flags. `active_layers` must have one entry per layer.
+void SendVideoStream::UpdateActiveLayers(std::vector<bool> active_layers) {
+  sender_->task_queue_.PostTask([=] {
+    MutexLock lock(&mutex_);
+    if (config_.encoder.codec ==
+        VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) {
+      send_stream_->StartPerRtpStream(active_layers);
+    }
+    VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_);
+    RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size());
+    for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i)
+      encoder_config.simulcast_layers[i].active = active_layers[i];
+    send_stream_->ReconfigureVideoEncoder(std::move(encoder_config));
+  });
+}
+
+// True iff `ssrc` is one of the primary media SSRCs owned by this stream.
+bool SendVideoStream::UsingSsrc(uint32_t ssrc) const {
+  return std::find(ssrcs_.begin(), ssrcs_.end(), ssrc) != ssrcs_.end();
+}
+
+// True iff `ssrc` is one of the RTX (retransmission) SSRCs of this stream.
+bool SendVideoStream::UsingRtxSsrc(uint32_t ssrc) const {
+  return std::find(rtx_ssrcs_.begin(), rtx_ssrcs_.end(), ssrc) !=
+         rtx_ssrcs_.end();
+}
+
+void SendVideoStream::SetCaptureFramerate(int framerate) {
+  sender_->SendTask([&] { video_capturer_->ChangeFramerate(framerate); });
+}
+
+VideoSendStream::Stats SendVideoStream::GetStats() const {
+  return send_stream_->GetStats();
+}
+
+// Returns a column printer emitting target rate, sent media rate (both in
+// bytes/s) and the largest substream resolution, for periodic stats logging.
+ColumnPrinter SendVideoStream::StatsPrinter() {
+  return ColumnPrinter::Lambda(
+      "video_target_rate video_sent_rate width height",
+      [this](rtc::SimpleStringBuilder& sb) {
+        VideoSendStream::Stats video_stats = send_stream_->GetStats();
+        int width = 0;
+        int height = 0;
+        // Report the maximum dimensions over all simulcast substreams.
+        for (const auto& stream_stat : video_stats.substreams) {
+          width = std::max(width, stream_stat.second.width);
+          height = std::max(height, stream_stat.second.height);
+        }
+        sb.AppendFormat("%.0lf %.0lf %i %i",
+                        video_stats.target_media_bitrate_bps / 8.0,
+                        video_stats.media_bitrate_bps / 8.0, width, height);
+      },
+      64);
+}
+
+// Creates one receive stream per simulcast stream of `send_stream`, wiring a
+// matching decoder factory, an optional DecodedFrameTap per layer for the
+// analyzer, and optional FlexFEC/RTX reception.
+ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver,
+                                       VideoStreamConfig config,
+                                       SendVideoStream* send_stream,
+                                       size_t chosen_stream,
+                                       Transport* feedback_transport,
+                                       VideoFrameMatcher* matcher)
+    : receiver_(receiver), config_(config) {
+  // Fake/generic encodes are paired with a fake decoder; real codecs use the
+  // internal software decoder factory.
+  if (config.encoder.codec ==
+          VideoStreamConfig::Encoder::Codec::kVideoCodecGeneric ||
+      config.encoder.implementation == VideoStreamConfig::Encoder::kFake) {
+    decoder_factory_ = std::make_unique<FunctionVideoDecoderFactory>(
+        []() { return std::make_unique<FakeDecoder>(); });
+  } else {
+    decoder_factory_ = std::make_unique<InternalDecoderFactory>();
+  }
+
+  VideoReceiveStreamInterface::Decoder decoder =
+      CreateMatchingDecoder(CodecTypeToPayloadType(config.encoder.codec),
+                            CodecTypeToPayloadString(config.encoder.codec));
+  size_t num_streams = config.encoder.simulcast_streams.size();
+  for (size_t i = 0; i < num_streams; ++i) {
+    rtc::VideoSinkInterface<VideoFrame>* renderer = &fake_renderer_;
+    if (matcher->Active()) {
+      render_taps_.emplace_back(
+          std::make_unique<DecodedFrameTap>(receiver_->clock_, matcher, i));
+      renderer = render_taps_.back().get();
+    }
+    auto recv_config = CreateVideoReceiveStreamConfig(
+        config, feedback_transport, decoder_factory_.get(), decoder, renderer,
+        receiver_->GetNextVideoLocalSsrc(), send_stream->ssrcs_[i],
+        send_stream->rtx_ssrcs_[i]);
+    if (config.stream.use_flexfec) {
+      RTC_DCHECK(num_streams == 1);
+      FlexfecReceiveStream::Config flexfec(feedback_transport);
+      flexfec.payload_type = CallTest::kFlexfecPayloadType;
+      flexfec.rtp.remote_ssrc = CallTest::kFlexfecSendSsrc;
+      // NOTE(review): the send side protects the primary media ssrcs_, but
+      // here the RTX SSRCs are listed as protected — confirm this mismatch
+      // is intentional.
+      flexfec.protected_media_ssrcs = send_stream->rtx_ssrcs_;
+      flexfec.rtp.local_ssrc = recv_config.rtp.local_ssrc;
+      receiver_->ssrc_media_types_[flexfec.rtp.remote_ssrc] = MediaType::VIDEO;
+
+      receiver_->SendTask([this, &flexfec] {
+        flecfec_stream_ = receiver_->call_->CreateFlexfecReceiveStream(flexfec);
+      });
+    }
+    receiver_->ssrc_media_types_[recv_config.rtp.remote_ssrc] =
+        MediaType::VIDEO;
+    if (config.stream.use_rtx)
+      receiver_->ssrc_media_types_[recv_config.rtp.rtx_ssrc] = MediaType::VIDEO;
+    receiver_->SendTask([this, &recv_config] {
+      receive_streams_.push_back(
+          receiver_->call_->CreateVideoReceiveStream(std::move(recv_config)));
+    });
+  }
+}
+
+ReceiveVideoStream::~ReceiveVideoStream() {
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      receiver_->call_->DestroyVideoReceiveStream(recv_stream);
+    if (flecfec_stream_)
+      receiver_->call_->DestroyFlexfecReceiveStream(flecfec_stream_);
+  });
+}
+
+void ReceiveVideoStream::Start() {
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      recv_stream->Start();
+    // Mark the video channel network state as up so reception proceeds.
+    receiver_->call_->SignalChannelNetworkState(MediaType::VIDEO, kNetworkUp);
+  });
+}
+
+void ReceiveVideoStream::Stop() {
+  receiver_->SendTask([this] {
+    for (auto* recv_stream : receive_streams_)
+      recv_stream->Stop();
+  });
+}
+
+// Returns stats for the last receive stream, or default-constructed stats if
+// none exist.
+VideoReceiveStreamInterface::Stats ReceiveVideoStream::GetStats() const {
+  if (receive_streams_.empty())
+    return VideoReceiveStreamInterface::Stats();
+  // TODO(srte): Handle multiple receive streams.
+  return receive_streams_.back()->GetStats();
+}
+
+VideoStreamPair::~VideoStreamPair() = default;
+
+// Couples a send stream on `sender` with a receive stream on `receiver`,
+// sharing one frame matcher fed by the config's frame pair handlers. Member
+// order matters: the matcher must outlive both streams.
+VideoStreamPair::VideoStreamPair(CallClient* sender,
+                                 CallClient* receiver,
+                                 VideoStreamConfig config)
+    : config_(config),
+      matcher_(config.hooks.frame_pair_handlers),
+      send_stream_(sender, config, sender->transport_.get(), &matcher_),
+      receive_stream_(receiver,
+                      config,
+                      &send_stream_,
+                      /*chosen_stream=*/0,
+                      receiver->transport_.get(),
+                      &matcher_) {}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scenario/video_stream.h b/third_party/libwebrtc/test/scenario/video_stream.h
new file mode 100644
index 0000000000..43c51eab73
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_SCENARIO_VIDEO_STREAM_H_
+#define TEST_SCENARIO_VIDEO_STREAM_H_
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "rtc_base/synchronization/mutex.h"
+#include "test/fake_encoder.h"
+#include "test/fake_videorenderer.h"
+#include "test/frame_generator_capturer.h"
+#include "test/logging/log_writer.h"
+#include "test/scenario/call_client.h"
+#include "test/scenario/column_printer.h"
+#include "test/scenario/network_node.h"
+#include "test/scenario/scenario_config.h"
+#include "test/scenario/video_frame_matcher.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace test {
+// SendVideoStream provides an interface for changing parameters and retrieving
+// states at run time.
+class SendVideoStream {
+ public:
+ ~SendVideoStream();
+
+ SendVideoStream(const SendVideoStream&) = delete;
+ SendVideoStream& operator=(const SendVideoStream&) = delete;
+
+ void SetCaptureFramerate(int framerate);
+ VideoSendStream::Stats GetStats() const;
+ ColumnPrinter StatsPrinter();
+ void Start();
+ void Stop();
+ void UpdateConfig(std::function<void(VideoStreamConfig*)> modifier);
+ void UpdateActiveLayers(std::vector<bool> active_layers);
+ bool UsingSsrc(uint32_t ssrc) const;
+ bool UsingRtxSsrc(uint32_t ssrc) const;
+
+ private:
+ friend class Scenario;
+ friend class VideoStreamPair;
+ friend class ReceiveVideoStream;
+ // Handles RTCP feedback for this stream.
+ SendVideoStream(CallClient* sender,
+ VideoStreamConfig config,
+ Transport* send_transport,
+ VideoFrameMatcher* matcher);
+
+ Mutex mutex_;
+ std::vector<uint32_t> ssrcs_;
+ std::vector<uint32_t> rtx_ssrcs_;
+ VideoSendStream* send_stream_ = nullptr;
+ CallClient* const sender_;
+ VideoStreamConfig config_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<VideoEncoderFactory> encoder_factory_;
+ std::vector<test::FakeEncoder*> fake_encoders_ RTC_GUARDED_BY(mutex_);
+ std::unique_ptr<VideoBitrateAllocatorFactory> bitrate_allocator_factory_;
+ std::unique_ptr<FrameGeneratorCapturer> video_capturer_;
+ std::unique_ptr<ForwardingCapturedFrameTap> frame_tap_;
+ int next_local_network_id_ = 0;
+ int next_remote_network_id_ = 0;
+};
+
+// ReceiveVideoStream represents a video receiver. It can't be used directly.
+class ReceiveVideoStream {
+ public:
+ ~ReceiveVideoStream();
+
+ ReceiveVideoStream(const ReceiveVideoStream&) = delete;
+ ReceiveVideoStream& operator=(const ReceiveVideoStream&) = delete;
+
+ void Start();
+ void Stop();
+ VideoReceiveStreamInterface::Stats GetStats() const;
+
+ private:
+ friend class Scenario;
+ friend class VideoStreamPair;
+ ReceiveVideoStream(CallClient* receiver,
+ VideoStreamConfig config,
+ SendVideoStream* send_stream,
+ size_t chosen_stream,
+ Transport* feedback_transport,
+ VideoFrameMatcher* matcher);
+
+ std::vector<VideoReceiveStreamInterface*> receive_streams_;
+ FlexfecReceiveStream* flecfec_stream_ = nullptr;
+ FakeVideoRenderer fake_renderer_;
+ std::vector<std::unique_ptr<rtc::VideoSinkInterface<VideoFrame>>>
+ render_taps_;
+ CallClient* const receiver_;
+ const VideoStreamConfig config_;
+ std::unique_ptr<VideoDecoderFactory> decoder_factory_;
+};
+
+// VideoStreamPair represents a video streaming session. It can be used to
+// access underlying send and receive classes. It can also be used in calls to
+// the Scenario class.
+class VideoStreamPair {
+ public:
+ ~VideoStreamPair();
+
+ VideoStreamPair(const VideoStreamPair&) = delete;
+ VideoStreamPair& operator=(const VideoStreamPair&) = delete;
+
+ SendVideoStream* send() { return &send_stream_; }
+ ReceiveVideoStream* receive() { return &receive_stream_; }
+ VideoFrameMatcher* matcher() { return &matcher_; }
+
+ private:
+ friend class Scenario;
+ VideoStreamPair(CallClient* sender,
+ CallClient* receiver,
+ VideoStreamConfig config);
+
+ const VideoStreamConfig config_;
+
+ VideoFrameMatcher matcher_;
+ SendVideoStream send_stream_;
+ ReceiveVideoStream receive_stream_;
+};
+
+std::vector<RtpExtension> GetVideoRtpExtensions(const VideoStreamConfig config);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCENARIO_VIDEO_STREAM_H_
diff --git a/third_party/libwebrtc/test/scenario/video_stream_unittest.cc b/third_party/libwebrtc/test/scenario/video_stream_unittest.cc
new file mode 100644
index 0000000000..e53af4ef2b
--- /dev/null
+++ b/third_party/libwebrtc/test/scenario/video_stream_unittest.cc
@@ -0,0 +1,322 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <atomic>
+
+#include "api/test/network_emulation/create_cross_traffic.h"
+#include "api/test/network_emulation/cross_traffic.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/scenario/scenario.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+using Capture = VideoStreamConfig::Source::Capture;
+using ContentType = VideoStreamConfig::Encoder::ContentType;
+using Codec = VideoStreamConfig::Encoder::Codec;
+using CodecImpl = VideoStreamConfig::Encoder::Implementation;
+} // namespace
+
+TEST(VideoStreamTest, ReceivesFramesFromFileBasedStreams) {
+ TimeDelta kRunTime = TimeDelta::Millis(500);
+ std::vector<int> kFrameRates = {15, 30};
+ std::deque<std::atomic<int>> frame_counts(2);
+ frame_counts[0] = 0;
+ frame_counts[1] = 0;
+ {
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->hooks.frame_pair_handlers = {
+ [&](const VideoFramePair&) { frame_counts[0]++; }};
+ c->source.capture = Capture::kVideoFile;
+ c->source.video_file.name = "foreman_cif";
+ c->source.video_file.width = 352;
+ c->source.video_file.height = 288;
+ c->source.framerate = kFrameRates[0];
+ c->encoder.implementation = CodecImpl::kSoftware;
+ c->encoder.codec = Codec::kVideoCodecVP8;
+ });
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->hooks.frame_pair_handlers = {
+ [&](const VideoFramePair&) { frame_counts[1]++; }};
+ c->source.capture = Capture::kImageSlides;
+ c->source.slides.images.crop.width = 320;
+ c->source.slides.images.crop.height = 240;
+ c->source.framerate = kFrameRates[1];
+ c->encoder.implementation = CodecImpl::kSoftware;
+ c->encoder.codec = Codec::kVideoCodecVP9;
+ });
+ s.RunFor(kRunTime);
+ }
+ std::vector<int> expected_counts;
+ for (int fps : kFrameRates)
+ expected_counts.push_back(
+ static_cast<int>(kRunTime.seconds<double>() * fps * 0.8));
+
+ EXPECT_GE(frame_counts[0], expected_counts[0]);
+ EXPECT_GE(frame_counts[1], expected_counts[1]);
+}
+
+TEST(VideoStreamTest, ReceivesVp8SimulcastFrames) {
+ TimeDelta kRunTime = TimeDelta::Millis(500);
+ int kFrameRate = 30;
+
+ std::deque<std::atomic<int>> frame_counts(3);
+ frame_counts[0] = 0;
+ frame_counts[1] = 0;
+ frame_counts[2] = 0;
+ {
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ // TODO(srte): Replace with code checking for all simulcast streams when
+ // there's a hook available for that.
+ c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) {
+ frame_counts[info.layer_id]++;
+ RTC_DCHECK(info.decoded);
+ printf("%i: [%3i->%3i, %i], %i->%i, \n", info.layer_id, info.capture_id,
+ info.decode_id, info.repeated, info.captured->width(),
+ info.decoded->width());
+ }};
+ c->source.framerate = kFrameRate;
+ // The resolution must be high enough to allow smaller layers to be
+ // created.
+ c->source.generator.width = 1024;
+ c->source.generator.height = 768;
+ c->encoder.implementation = CodecImpl::kSoftware;
+ c->encoder.codec = Codec::kVideoCodecVP8;
+ // Enable simulcast.
+ c->encoder.simulcast_streams = {webrtc::ScalabilityMode::kL1T1,
+ webrtc::ScalabilityMode::kL1T1,
+ webrtc::ScalabilityMode::kL1T1};
+
+ });
+ s.RunFor(kRunTime);
+ }
+
+ // Using high error margin to avoid flakiness.
+ const int kExpectedCount =
+ static_cast<int>(kRunTime.seconds<double>() * kFrameRate * 0.5);
+
+ EXPECT_GE(frame_counts[0], kExpectedCount);
+ EXPECT_GE(frame_counts[1], kExpectedCount);
+ EXPECT_GE(frame_counts[2], kExpectedCount);
+}
+
+TEST(VideoStreamTest, SendsNacksOnLoss) {
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+ c->loss_rate = 0.2;
+ })},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ // NACK retransmissions are enabled by default.
+ auto video = s.CreateVideoStream(route->forward(), VideoStreamConfig());
+ s.RunFor(TimeDelta::Seconds(1));
+ int retransmit_packets = 0;
+ VideoSendStream::Stats stats;
+ route->first()->SendTask([&]() { stats = video->send()->GetStats(); });
+ for (const auto& substream : stats.substreams) {
+ retransmit_packets += substream.second.rtp_stats.retransmitted.packets;
+ }
+ EXPECT_GT(retransmit_packets, 0);
+}
+
+TEST(VideoStreamTest, SendsFecWithUlpFec) {
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+ c->loss_rate = 0.1;
+ c->delay = TimeDelta::Millis(100);
+ })},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ // We do not allow NACK+ULPFEC for generic codec, using VP8.
+ c->encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
+ c->stream.use_ulpfec = true;
+ });
+ s.RunFor(TimeDelta::Seconds(5));
+ VideoSendStream::Stats video_stats;
+ route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
+ EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
+}
+TEST(VideoStreamTest, SendsFecWithFlexFec) {
+ Scenario s;
+ auto route =
+ s.CreateRoutes(s.CreateClient("caller", CallClientConfig()),
+ {s.CreateSimulationNode([](NetworkSimulationConfig* c) {
+ c->loss_rate = 0.1;
+ c->delay = TimeDelta::Millis(100);
+ })},
+ s.CreateClient("callee", CallClientConfig()),
+ {s.CreateSimulationNode(NetworkSimulationConfig())});
+ auto video = s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->stream.use_flexfec = true;
+ });
+ s.RunFor(TimeDelta::Seconds(5));
+ VideoSendStream::Stats video_stats;
+ route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); });
+ EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u);
+}
+
+TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) {
+ // Declared before scenario to avoid use after free.
+ std::atomic<size_t> num_qvga_frames_(0);
+ std::atomic<size_t> num_vga_frames_(0);
+
+ Scenario s;
+ // Link has enough capacity for VGA.
+ NetworkSimulationConfig net_conf;
+ net_conf.bandwidth = DataRate::KilobitsPerSec(800);
+ net_conf.delay = TimeDelta::Millis(50);
+ auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+ c->transport.rates.start_rate = DataRate::KilobitsPerSec(800);
+ });
+ auto send_net = {s.CreateSimulationNode(net_conf)};
+ auto ret_net = {s.CreateSimulationNode(net_conf)};
+ auto* route = s.CreateRoutes(
+ client, send_net, s.CreateClient("return", CallClientConfig()), ret_net);
+
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->hooks.frame_pair_handlers = {[&](const VideoFramePair& info) {
+ if (info.decoded->width() == 640) {
+ ++num_vga_frames_;
+ } else if (info.decoded->width() == 320) {
+ ++num_qvga_frames_;
+ } else {
+ ADD_FAILURE() << "Unexpected resolution: " << info.decoded->width();
+ }
+ }};
+ c->source.framerate = 30;
+ // The resolution must be high enough to allow smaller layers to be
+ // created.
+ c->source.generator.width = 640;
+ c->source.generator.height = 480;
+ c->encoder.implementation = CodecImpl::kSoftware;
+ c->encoder.codec = Codec::kVideoCodecVP9;
+ // Enable SVC.
+ c->encoder.simulcast_streams = {webrtc::ScalabilityMode::kL2T1};
+ });
+
+ // Run for a few seconds, until streams have stabilized,
+ // check that we are sending VGA.
+ s.RunFor(TimeDelta::Seconds(5));
+ EXPECT_GT(num_vga_frames_, 0u);
+
+ // Trigger cross traffic, run until we have seen 3 consecutive
+ // seconds with no VGA frames due to reduced available bandwidth.
+ auto cross_traffic = s.net()->StartCrossTraffic(CreateFakeTcpCrossTraffic(
+ s.net()->CreateRoute(send_net), s.net()->CreateRoute(ret_net),
+ FakeTcpConfig()));
+
+ int num_seconds_without_vga = 0;
+ int num_iterations = 0;
+ do {
+ ASSERT_LE(++num_iterations, 100);
+ num_qvga_frames_ = 0;
+ num_vga_frames_ = 0;
+ s.RunFor(TimeDelta::Seconds(1));
+ if (num_qvga_frames_ > 0 && num_vga_frames_ == 0) {
+ ++num_seconds_without_vga;
+ } else {
+ num_seconds_without_vga = 0;
+ }
+ } while (num_seconds_without_vga < 3);
+
+ // Stop cross traffic, make sure we recover and get VGA frames again.
+ s.net()->StopCrossTraffic(cross_traffic);
+ num_qvga_frames_ = 0;
+ num_vga_frames_ = 0;
+
+ s.RunFor(TimeDelta::Seconds(40));
+ EXPECT_GT(num_qvga_frames_, 0u);
+ EXPECT_GT(num_vga_frames_, 0u);
+}
+
+TEST(VideoStreamTest, SuspendsBelowMinBitrate) {
+ const DataRate kMinVideoBitrate = DataRate::KilobitsPerSec(30);
+
+ // Declared before scenario to avoid use after free.
+ std::atomic<Timestamp> last_frame_timestamp(Timestamp::MinusInfinity());
+
+ Scenario s;
+ NetworkSimulationConfig net_config;
+ net_config.bandwidth = kMinVideoBitrate * 4;
+ net_config.delay = TimeDelta::Millis(10);
+ auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+ // Min transmit rate needs to be lower than kMinVideoBitrate for this test
+ // to make sense.
+ c->transport.rates.min_rate = kMinVideoBitrate / 2;
+ c->transport.rates.start_rate = kMinVideoBitrate;
+ c->transport.rates.max_rate = kMinVideoBitrate * 2;
+ });
+ auto send_net = s.CreateMutableSimulationNode(
+ [&](NetworkSimulationConfig* c) { *c = net_config; });
+ auto ret_net = {s.CreateSimulationNode(net_config)};
+ auto* route =
+ s.CreateRoutes(client, {send_net->node()},
+ s.CreateClient("return", CallClientConfig()), ret_net);
+
+ s.CreateVideoStream(route->forward(), [&](VideoStreamConfig* c) {
+ c->hooks.frame_pair_handlers = {[&](const VideoFramePair& pair) {
+ if (pair.repeated == 0) {
+ last_frame_timestamp = pair.capture_time;
+ }
+ }};
+ c->source.framerate = 30;
+ c->source.generator.width = 320;
+ c->source.generator.height = 180;
+ c->encoder.implementation = CodecImpl::kFake;
+ c->encoder.codec = Codec::kVideoCodecVP8;
+ c->encoder.min_data_rate = kMinVideoBitrate;
+ c->encoder.suspend_below_min_bitrate = true;
+ c->stream.pad_to_rate = kMinVideoBitrate;
+ });
+
+ // Run for a few seconds, check we have received at least one frame.
+ s.RunFor(TimeDelta::Seconds(2));
+ EXPECT_TRUE(last_frame_timestamp.load().IsFinite());
+
+ // Degrade network to below min bitrate.
+ send_net->UpdateConfig([&](NetworkSimulationConfig* c) {
+ c->bandwidth = kMinVideoBitrate * 0.9;
+ });
+
+ // Run for 20s, verify that no frames arrive that were captured after the
+ // first five seconds, allowing some margin for BWE backoff to trigger and
+ // packets already in the pipeline to potentially arrive.
+ s.RunFor(TimeDelta::Seconds(20));
+ EXPECT_GT(s.Now() - last_frame_timestamp, TimeDelta::Seconds(15));
+
+ // Relax the network constraints and run for a while more, verify that we
+ // start receiving frames again.
+ send_net->UpdateConfig(
+ [&](NetworkSimulationConfig* c) { c->bandwidth = kMinVideoBitrate * 4; });
+ last_frame_timestamp = Timestamp::MinusInfinity();
+ s.RunFor(TimeDelta::Seconds(15));
+ EXPECT_TRUE(last_frame_timestamp.load().IsFinite());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scoped_key_value_config.cc b/third_party/libwebrtc/test/scoped_key_value_config.cc
new file mode 100644
index 0000000000..df84462637
--- /dev/null
+++ b/third_party/libwebrtc/test/scoped_key_value_config.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/scoped_key_value_config.h"
+
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/field_trial.h"
+#include "test/field_trial.h"
+
+namespace {
+
+// This part is copied from system_wrappers/field_trial.cc.
+void InsertIntoMap(
+ std::map<std::string, std::string, std::less<>>& key_value_map,
+ absl::string_view s) {
+ std::string::size_type field_start = 0;
+ while (field_start < s.size()) {
+ std::string::size_type separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing separator '/' after field trial key.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial key cannot be empty.";
+ std::string key(s.substr(field_start, separator_pos - field_start));
+ field_start = separator_pos + 1;
+
+ RTC_CHECK_LT(field_start, s.size())
+ << "Missing value after field trial key. String ended.";
+ separator_pos = s.find('/', field_start);
+ RTC_CHECK_NE(separator_pos, std::string::npos)
+ << "Missing terminating '/' in field trial string.";
+ RTC_CHECK_GT(separator_pos, field_start)
+ << "Field trial value cannot be empty.";
+ std::string value(s.substr(field_start, separator_pos - field_start));
+ field_start = separator_pos + 1;
+
+ key_value_map[key] = value;
+ }
+ // This check is technically redundant due to earlier checks.
+ // We nevertheless keep the check to make it clear that the entire
+ // string has been processed, and without indexing past the end.
+ RTC_CHECK_EQ(field_start, s.size());
+}
+
+} // namespace
+
+namespace webrtc {
+namespace test {
+
+ScopedKeyValueConfig::ScopedKeyValueConfig()
+ : ScopedKeyValueConfig(nullptr, "") {}
+
+ScopedKeyValueConfig::ScopedKeyValueConfig(absl::string_view s)
+ : ScopedKeyValueConfig(nullptr, s) {}
+
+ScopedKeyValueConfig::ScopedKeyValueConfig(ScopedKeyValueConfig& parent,
+ absl::string_view s)
+ : ScopedKeyValueConfig(&parent, s) {}
+
+ScopedKeyValueConfig::ScopedKeyValueConfig(ScopedKeyValueConfig* parent,
+ absl::string_view s)
+ : parent_(parent), leaf_(nullptr) {
+ InsertIntoMap(key_value_map_, s);
+
+ if (!s.empty()) {
+ // Also store field trials in global string (until we get rid of it).
+ scoped_field_trials_ = std::make_unique<ScopedFieldTrials>(s);
+ }
+
+ if (parent == nullptr) {
+ // We are root, set leaf_.
+ leaf_ = this;
+ } else {
+ // Link root to new leaf.
+ GetRoot(parent)->leaf_ = this;
+ RTC_DCHECK(leaf_ == nullptr);
+ }
+}
+
+ScopedKeyValueConfig::~ScopedKeyValueConfig() {
+ if (parent_) {
+ GetRoot(parent_)->leaf_ = parent_;
+ }
+}
+
+ScopedKeyValueConfig* ScopedKeyValueConfig::GetRoot(ScopedKeyValueConfig* n) {
+ while (n->parent_ != nullptr) {
+ n = n->parent_;
+ }
+ return n;
+}
+
+std::string ScopedKeyValueConfig::GetValue(absl::string_view key) const {
+ if (parent_ == nullptr) {
+ return leaf_->LookupRecurse(key);
+ } else {
+ return LookupRecurse(key);
+ }
+}
+
+std::string ScopedKeyValueConfig::LookupRecurse(absl::string_view key) const {
+ auto it = key_value_map_.find(key);
+ if (it != key_value_map_.end())
+ return it->second;
+
+ if (parent_) {
+ return parent_->LookupRecurse(key);
+ }
+
+ // When at the root, check the global string so that test programs using
+ // a mix between ScopedKeyValueConfig and the global string continue to work.
+ return webrtc::field_trial::FindFullName(std::string(key));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/scoped_key_value_config.h b/third_party/libwebrtc/test/scoped_key_value_config.h
new file mode 100644
index 0000000000..c0023f8228
--- /dev/null
+++ b/third_party/libwebrtc/test/scoped_key_value_config.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_SCOPED_KEY_VALUE_CONFIG_H_
+#define TEST_SCOPED_KEY_VALUE_CONFIG_H_
+
+#include <functional>
+#include <map>
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/field_trials_registry.h"
+#include "test/field_trial.h"
+
+namespace webrtc {
+namespace test {
+
+class ScopedKeyValueConfig : public FieldTrialsRegistry {
+ public:
+ virtual ~ScopedKeyValueConfig();
+ ScopedKeyValueConfig();
+ explicit ScopedKeyValueConfig(absl::string_view s);
+ ScopedKeyValueConfig(ScopedKeyValueConfig& parent, absl::string_view s);
+
+ private:
+ ScopedKeyValueConfig(ScopedKeyValueConfig* parent, absl::string_view s);
+ ScopedKeyValueConfig* GetRoot(ScopedKeyValueConfig* n);
+ std::string GetValue(absl::string_view key) const override;
+ std::string LookupRecurse(absl::string_view key) const;
+
+ ScopedKeyValueConfig* const parent_;
+
+ // The leaf in a list of stacked ScopedKeyValueConfig.
+ // Only set on root (e.g with parent_ == nullptr).
+ const ScopedKeyValueConfig* leaf_;
+
+ // Unlike std::less<std::string>, std::less<> is transparent and allows
+ // heterogeneous lookup directly with absl::string_view.
+ std::map<std::string, std::string, std::less<>> key_value_map_;
+ std::unique_ptr<ScopedFieldTrials> scoped_field_trials_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_SCOPED_KEY_VALUE_CONFIG_H_
diff --git a/third_party/libwebrtc/test/test_flags.cc b/third_party/libwebrtc/test/test_flags.cc
new file mode 100644
index 0000000000..a0fff747fe
--- /dev/null
+++ b/third_party/libwebrtc/test/test_flags.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/test_flags.h"
+
+#include <string>
+#include <vector>
+
+#include "absl/flags/flag.h"
+
+ABSL_FLAG(std::string,
+ force_fieldtrials,
+ "",
+ "Field trials control experimental feature code which can be forced. "
+ "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
+ " will assign the group Enable to field trial WebRTC-FooFeature.");
+
+ABSL_FLAG(std::vector<std::string>,
+ plot,
+ {},
+ "List of metrics that should be exported for plotting (if they are "
+ "available). Example: psnr,ssim,encode_time. To plot all available "
+ " metrics pass 'all' as flag value");
+
+ABSL_FLAG(
+ std::string,
+ isolated_script_test_perf_output,
+ "",
+ "Path where the perf results should be stored in proto format "
+ "described by histogram.proto in "
+ "https://chromium.googlesource.com/catapult/.");
+
+ABSL_FLAG(std::string,
+ webrtc_test_metrics_output_path,
+ "",
+ "Path where the test perf metrics should be stored using "
+ "api/test/metrics/metric.proto proto format. File will contain "
+ "MetricsSet as a root proto. On iOS, this MUST be a file name "
+ "and the file will be stored under NSDocumentDirectory.");
+
+ABSL_FLAG(bool,
+ export_perf_results_new_api,
+ false,
+ "Tells to initialize new API for exporting performance metrics");
diff --git a/third_party/libwebrtc/test/test_flags.h b/third_party/libwebrtc/test/test_flags.h
new file mode 100644
index 0000000000..30f918fc7d
--- /dev/null
+++ b/third_party/libwebrtc/test/test_flags.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TEST_FLAGS_H_
+#define TEST_TEST_FLAGS_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/flags/declare.h"
+
+ABSL_DECLARE_FLAG(std::string, force_fieldtrials);
+ABSL_DECLARE_FLAG(std::vector<std::string>, plot);
+ABSL_DECLARE_FLAG(std::string, isolated_script_test_perf_output);
+ABSL_DECLARE_FLAG(std::string, webrtc_test_metrics_output_path);
+ABSL_DECLARE_FLAG(bool, export_perf_results_new_api);
+
+#endif // TEST_TEST_FLAGS_H_
diff --git a/third_party/libwebrtc/test/test_main.cc b/third_party/libwebrtc/test/test_main.cc
new file mode 100644
index 0000000000..d811fd0e6d
--- /dev/null
+++ b/third_party/libwebrtc/test/test_main.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <regex>
+#include <string>
+#include <vector>
+
+#include "absl/debugging/failure_signal_handler.h"
+#include "absl/debugging/symbolize.h"
+#include "absl/flags/parse.h"
+#include "test/gmock.h"
+#include "test/test_main_lib.h"
+
+namespace {
+
+std::vector<std::string> ReplaceDashesWithUnderscores(int argc, char* argv[]) {
+ std::vector<std::string> args(argv, argv + argc);
+ for (std::string& arg : args) {
+ // Only replace arguments that start with a dash.
+ if (!arg.empty() && arg[0] == '-') {
+ // Don't replace the 2 first characters.
+ auto begin = arg.begin() + 2;
+ // Replace dashes on the left of '=' or on all the arg if no '=' is found.
+ auto end = std::find(arg.begin(), arg.end(), '=');
+ std::replace(begin, end, '-', '_');
+ }
+ }
+ return args;
+}
+
+std::vector<char*> VectorOfStringsToVectorOfPointers(
+ std::vector<std::string>& input) {
+ std::vector<char*> output(input.size());
+ for (size_t i = 0; i < input.size(); ++i) {
+ output[i] = &(input[i][0]);
+ }
+ return output;
+}
+
+} // namespace
+
+int main(int argc, char* argv[]) {
+ // Initialize the symbolizer to get a human-readable stack trace
+ absl::InitializeSymbolizer(argv[0]);
+ testing::InitGoogleMock(&argc, argv);
+ // Before parsing the arguments with the absl flag library, any internal '-'
+ // characters will be converted to '_' characters to make sure the string is a
+ // valid attribute name.
+ std::vector<std::string> new_argv = ReplaceDashesWithUnderscores(argc, argv);
+ std::vector<char*> raw_new_argv = VectorOfStringsToVectorOfPointers(new_argv);
+ absl::ParseCommandLine(argc, &raw_new_argv[0]);
+
+// This absl handler uses unsupported features/instructions on Fuchsia
+#if !defined(WEBRTC_FUCHSIA)
+ absl::FailureSignalHandlerOptions options;
+ absl::InstallFailureSignalHandler(options);
+#endif
+
+ std::unique_ptr<webrtc::TestMain> main = webrtc::TestMain::Create();
+ int err_code = main->Init();
+ if (err_code != 0) {
+ return err_code;
+ }
+ return main->Run(argc, argv);
+}
diff --git a/third_party/libwebrtc/test/test_main_lib.cc b/third_party/libwebrtc/test/test_main_lib.cc
new file mode 100644
index 0000000000..4c80315ac5
--- /dev/null
+++ b/third_party/libwebrtc/test/test_main_lib.cc
@@ -0,0 +1,267 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/test_main_lib.h"
+
+#include <fstream>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
+#include "api/test/metrics/global_metrics_logger_and_exporter.h"
+#include "api/test/metrics/metrics_exporter.h"
+#include "api/test/metrics/metrics_set_proto_file_exporter.h"
+#include "api/test/metrics/print_result_proxy_metrics_exporter.h"
+#include "api/test/metrics/stdout_metrics_exporter.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/event_tracer.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ssl_adapter.h"
+#include "rtc_base/ssl_stream_adapter.h"
+#include "rtc_base/thread.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+#include "test/test_flags.h"
+#include "test/testsupport/perf_test.h"
+#include "test/testsupport/resources_dir_flag.h"
+
+#if defined(WEBRTC_WIN)
+#include "rtc_base/win32_socket_init.h"
+#endif
+
+#if defined(WEBRTC_IOS)
+#include "test/ios/test_support.h"
+
+ABSL_FLAG(std::string,
+ NSTreatUnknownArgumentsAsOpen,
+ "",
+ "Intentionally ignored flag intended for iOS test runner.");
+ABSL_FLAG(std::string,
+ ApplePersistenceIgnoreState,
+ "",
+ "Intentionally ignored flag intended for iOS test runner.");
+ABSL_FLAG(bool,
+ enable_run_ios_unittests_with_xctest,
+ false,
+ "Intentionally ignored flag intended for iOS test runner.");
+ABSL_FLAG(bool,
+ write_compiled_tests_json_to_writable_path,
+ false,
+ "Intentionally ignored flag intended for iOS test runner.");
+
+// This is the cousin of isolated_script_test_perf_output, but we can't dictate
+// where to write on iOS so the semantics of this flag are a bit different.
+ABSL_FLAG(
+ bool,
+ write_perf_output_on_ios,
+ false,
+ "Store the perf results in Documents/perftest_result.pb in the format "
+ "described by histogram.proto in "
+ "https://chromium.googlesource.com/catapult/.");
+
+#elif defined(WEBRTC_FUCHSIA)
+ABSL_FLAG(std::string, use_vulkan, "", "Intentionally ignored flag.");
+#else
+// TODO(bugs.webrtc.org/8115): Remove workaround when fixed.
+ABSL_FLAG(bool, no_sandbox, false, "Intentionally ignored flag.");
+ABSL_FLAG(bool, test_launcher_bot_mode, false, "Intentionally ignored flag.");
+#endif
+
+ABSL_FLAG(std::string,
+ isolated_script_test_output,
+ "",
+ "Path to output an empty JSON file which Chromium infra requires.");
+
+ABSL_FLAG(bool, logs, true, "print logs to stderr");
+ABSL_FLAG(bool, verbose, false, "verbose logs to stderr");
+
+ABSL_FLAG(std::string,
+ trace_event,
+ "",
+ "Path to collect trace events (json file) for chrome://tracing. "
+ "If not set, events aren't captured.");
+
+ABSL_FLAG(std::string,
+ test_launcher_shard_index,
+ "",
+ "Index of the test shard to run, from 0 to "
+ "the value specified with --test_launcher_total_shards.");
+
+ABSL_FLAG(std::string,
+ test_launcher_total_shards,
+ "",
+ "Total number of shards.");
+
+namespace webrtc {
+
+namespace {
+
+constexpr char kPlotAllMetrics[] = "all";
+
+class TestMainImpl : public TestMain {
+ public:
+ int Init(int* argc, char* argv[]) override { return Init(); }
+
+ int Init() override {
+ // Make sure we always pull in the --resources_dir flag, even if the test
+ // binary doesn't link with fileutils (downstream expects all test mains to
+ // have this flag).
+ (void)absl::GetFlag(FLAGS_resources_dir);
+
+ // Default to LS_INFO, even for release builds to provide better test
+ // logging.
+ if (rtc::LogMessage::GetLogToDebug() > rtc::LS_INFO)
+ rtc::LogMessage::LogToDebug(rtc::LS_INFO);
+
+ if (absl::GetFlag(FLAGS_verbose))
+ rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE);
+
+ rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs) ||
+ absl::GetFlag(FLAGS_verbose));
+
+ // The sharding arguments take precedence over the sharding environment
+ // variables.
+ if (!absl::GetFlag(FLAGS_test_launcher_shard_index).empty() &&
+ !absl::GetFlag(FLAGS_test_launcher_total_shards).empty()) {
+ std::string shard_index =
+ "GTEST_SHARD_INDEX=" + absl::GetFlag(FLAGS_test_launcher_shard_index);
+ std::string total_shards =
+ "GTEST_TOTAL_SHARDS=" +
+ absl::GetFlag(FLAGS_test_launcher_total_shards);
+ putenv(shard_index.data());
+ putenv(total_shards.data());
+ }
+
+ // InitFieldTrialsFromString stores the char*, so the char array must
+ // outlive the application.
+ field_trials_ = absl::GetFlag(FLAGS_force_fieldtrials);
+ webrtc::field_trial::InitFieldTrialsFromString(field_trials_.c_str());
+ webrtc::metrics::Enable();
+
+#if defined(WEBRTC_WIN)
+ winsock_init_ = std::make_unique<rtc::WinsockInitializer>();
+#endif
+
+ // Initialize SSL which are used by several tests.
+ rtc::InitializeSSL();
+ rtc::SSLStreamAdapter::EnableTimeCallbackForTesting();
+
+ return 0;
+ }
+
+ int Run(int argc, char* argv[]) override {
+ std::string trace_event_path = absl::GetFlag(FLAGS_trace_event);
+ const bool capture_events = !trace_event_path.empty();
+ if (capture_events) {
+ rtc::tracing::SetupInternalTracer();
+ rtc::tracing::StartInternalCapture(trace_event_path);
+ }
+
+ absl::optional<std::vector<std::string>> metrics_to_plot =
+ absl::GetFlag(FLAGS_plot);
+
+ if (metrics_to_plot->empty()) {
+ metrics_to_plot = absl::nullopt;
+ } else {
+ if (metrics_to_plot->size() == 1 &&
+ (*metrics_to_plot)[0] == kPlotAllMetrics) {
+ metrics_to_plot->clear();
+ }
+ }
+
+#if defined(WEBRTC_IOS)
+ rtc::test::InitTestSuite(
+ RUN_ALL_TESTS, argc, argv,
+ absl::GetFlag(FLAGS_write_perf_output_on_ios),
+ absl::GetFlag(FLAGS_export_perf_results_new_api),
+ absl::GetFlag(FLAGS_webrtc_test_metrics_output_path), metrics_to_plot);
+ rtc::test::RunTestsFromIOSApp();
+ int exit_code = 0;
+#else
+ int exit_code = RUN_ALL_TESTS();
+
+ std::vector<std::unique_ptr<test::MetricsExporter>> exporters;
+ if (absl::GetFlag(FLAGS_export_perf_results_new_api)) {
+ exporters.push_back(std::make_unique<test::StdoutMetricsExporter>());
+ if (!absl::GetFlag(FLAGS_webrtc_test_metrics_output_path).empty()) {
+ exporters.push_back(
+ std::make_unique<webrtc::test::MetricsSetProtoFileExporter>(
+ webrtc::test::MetricsSetProtoFileExporter::Options(
+ absl::GetFlag(FLAGS_webrtc_test_metrics_output_path))));
+ }
+ if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) {
+ exporters.push_back(
+ std::make_unique<test::ChromePerfDashboardMetricsExporter>(
+ absl::GetFlag(FLAGS_isolated_script_test_perf_output)));
+ }
+ } else {
+ exporters.push_back(
+ std::make_unique<test::PrintResultProxyMetricsExporter>());
+ }
+ test::ExportPerfMetric(*test::GetGlobalMetricsLogger(),
+ std::move(exporters));
+ if (!absl::GetFlag(FLAGS_export_perf_results_new_api)) {
+ std::string perf_output_file =
+ absl::GetFlag(FLAGS_isolated_script_test_perf_output);
+ if (!perf_output_file.empty()) {
+ if (!webrtc::test::WritePerfResults(perf_output_file)) {
+ return 1;
+ }
+ }
+ if (metrics_to_plot) {
+ webrtc::test::PrintPlottableResults(*metrics_to_plot);
+ }
+ }
+
+ std::string result_filename =
+ absl::GetFlag(FLAGS_isolated_script_test_output);
+ if (!result_filename.empty()) {
+ std::ofstream result_file(result_filename);
+ result_file << "{\"version\": 3}";
+ }
+#endif
+
+ if (capture_events) {
+ rtc::tracing::StopInternalCapture();
+ }
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+ defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+ defined(UNDEFINED_SANITIZER)
+ // We want the test flagged as failed only for sanitizer defects,
+ // in which case the sanitizer will override exit code with 66.
+ exit_code = 0;
+#endif
+
+ return exit_code;
+ }
+
+ ~TestMainImpl() override = default;
+
+ private:
+#if defined(WEBRTC_WIN)
+ std::unique_ptr<rtc::WinsockInitializer> winsock_init_;
+#endif
+};
+
+} // namespace
+
+std::unique_ptr<TestMain> TestMain::Create() {
+ return std::make_unique<TestMainImpl>();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/test_main_lib.h b/third_party/libwebrtc/test/test_main_lib.h
new file mode 100644
index 0000000000..2233171c60
--- /dev/null
+++ b/third_party/libwebrtc/test/test_main_lib.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TEST_MAIN_LIB_H_
+#define TEST_TEST_MAIN_LIB_H_
+
+#include <memory>
+#include <string>
+
+namespace webrtc {
+
+// Class to initialize test environment and run tests.
+class TestMain {
+ public:
+ virtual ~TestMain() {}
+
+ static std::unique_ptr<TestMain> Create();
+
+ // Initializes test environment. Clients can add their own initialization
+ // steps after call to this method and before running tests.
+ // Returns 0 if initialization was successful and non 0 otherwise.
+ virtual int Init() = 0;
+ // Temporary for backward compatibility
+ virtual int Init(int* argc, char* argv[]) = 0;
+
+ // Runs test end return result error code. 0 - no errors.
+ virtual int Run(int argc, char* argv[]) = 0;
+
+ protected:
+ TestMain() = default;
+
+ std::string field_trials_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_TEST_MAIN_LIB_H_
diff --git a/third_party/libwebrtc/test/test_video_capturer.cc b/third_party/libwebrtc/test/test_video_capturer.cc
new file mode 100644
index 0000000000..4a4adc61d7
--- /dev/null
+++ b/third_party/libwebrtc/test/test_video_capturer.cc
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/test_video_capturer.h"
+
+#include <algorithm>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+
+namespace webrtc {
+namespace test {
+TestVideoCapturer::~TestVideoCapturer() = default;
+
+void TestVideoCapturer::OnOutputFormatRequest(
+ int width,
+ int height,
+ const absl::optional<int>& max_fps) {
+ absl::optional<std::pair<int, int>> target_aspect_ratio =
+ std::make_pair(width, height);
+ absl::optional<int> max_pixel_count = width * height;
+ video_adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count,
+ max_fps);
+}
+
+void TestVideoCapturer::OnFrame(const VideoFrame& original_frame) {
+ int cropped_width = 0;
+ int cropped_height = 0;
+ int out_width = 0;
+ int out_height = 0;
+
+ VideoFrame frame = MaybePreprocess(original_frame);
+
+ if (!video_adapter_.AdaptFrameResolution(
+ frame.width(), frame.height(), frame.timestamp_us() * 1000,
+ &cropped_width, &cropped_height, &out_width, &out_height)) {
+ // Drop frame in order to respect frame rate constraint.
+ return;
+ }
+
+ if (out_height != frame.height() || out_width != frame.width()) {
+ // Video adapter has requested a down-scale. Allocate a new buffer and
+ // return scaled version.
+ // For simplicity, only scale here without cropping.
+ rtc::scoped_refptr<I420Buffer> scaled_buffer =
+ I420Buffer::Create(out_width, out_height);
+ scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420());
+ VideoFrame::Builder new_frame_builder =
+ VideoFrame::Builder()
+ .set_video_frame_buffer(scaled_buffer)
+ .set_rotation(kVideoRotation_0)
+ .set_timestamp_us(frame.timestamp_us())
+ .set_id(frame.id());
+ if (frame.has_update_rect()) {
+ VideoFrame::UpdateRect new_rect = frame.update_rect().ScaleWithFrame(
+ frame.width(), frame.height(), 0, 0, frame.width(), frame.height(),
+ out_width, out_height);
+ new_frame_builder.set_update_rect(new_rect);
+ }
+ broadcaster_.OnFrame(new_frame_builder.build());
+
+ } else {
+ // No adaptations needed, just return the frame as is.
+ broadcaster_.OnFrame(frame);
+ }
+}
+
+rtc::VideoSinkWants TestVideoCapturer::GetSinkWants() {
+ return broadcaster_.wants();
+}
+
+void TestVideoCapturer::AddOrUpdateSink(
+ rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ UpdateVideoAdapter();
+}
+
+void TestVideoCapturer::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
+ broadcaster_.RemoveSink(sink);
+ UpdateVideoAdapter();
+}
+
+void TestVideoCapturer::UpdateVideoAdapter() {
+ video_adapter_.OnSinkWants(broadcaster_.wants());
+}
+
+VideoFrame TestVideoCapturer::MaybePreprocess(const VideoFrame& frame) {
+ MutexLock lock(&lock_);
+ if (preprocessor_ != nullptr) {
+ return preprocessor_->Preprocess(frame);
+ } else {
+ return frame;
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/test_video_capturer.h b/third_party/libwebrtc/test/test_video_capturer.h
new file mode 100644
index 0000000000..6fafd96efb
--- /dev/null
+++ b/third_party/libwebrtc/test/test_video_capturer.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TEST_VIDEO_CAPTURER_H_
+#define TEST_TEST_VIDEO_CAPTURER_H_
+
+#include <stddef.h>
+
+#include <memory>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_source_interface.h"
+#include "media/base/video_adapter.h"
+#include "media/base/video_broadcaster.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace test {
+
+class TestVideoCapturer : public rtc::VideoSourceInterface<VideoFrame> {
+ public:
+ class FramePreprocessor {
+ public:
+ virtual ~FramePreprocessor() = default;
+
+ virtual VideoFrame Preprocess(const VideoFrame& frame) = 0;
+ };
+
+ ~TestVideoCapturer() override;
+
+ void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override;
+ void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
+ void SetFramePreprocessor(std::unique_ptr<FramePreprocessor> preprocessor) {
+ MutexLock lock(&lock_);
+ preprocessor_ = std::move(preprocessor);
+ }
+ void OnOutputFormatRequest(int width,
+ int height,
+ const absl::optional<int>& max_fps);
+
+ protected:
+ void OnFrame(const VideoFrame& frame);
+ rtc::VideoSinkWants GetSinkWants();
+
+ private:
+ void UpdateVideoAdapter();
+ VideoFrame MaybePreprocess(const VideoFrame& frame);
+
+ Mutex lock_;
+ std::unique_ptr<FramePreprocessor> preprocessor_ RTC_GUARDED_BY(lock_);
+ rtc::VideoBroadcaster broadcaster_;
+ cricket::VideoAdapter video_adapter_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TEST_VIDEO_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/testsupport/DEPS b/third_party/libwebrtc/test/testsupport/DEPS
new file mode 100644
index 0000000000..6f6150ad30
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ # Histogram C++ API, used by perf tests.
+ "+third_party/catapult/tracing/tracing/value"
+]
diff --git a/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.cc b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.cc
new file mode 100644
index 0000000000..6de8e7fd99
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/copy_to_file_audio_capturer.h"
+
+#include <memory>
+#include <utility>
+
+namespace webrtc {
+namespace test {
+
+CopyToFileAudioCapturer::CopyToFileAudioCapturer(
+ std::unique_ptr<TestAudioDeviceModule::Capturer> delegate,
+ std::string stream_dump_file_name)
+ : delegate_(std::move(delegate)),
+ wav_writer_(std::make_unique<WavWriter>(std::move(stream_dump_file_name),
+ delegate_->SamplingFrequency(),
+ delegate_->NumChannels())) {}
+CopyToFileAudioCapturer::~CopyToFileAudioCapturer() = default;
+
+int CopyToFileAudioCapturer::SamplingFrequency() const {
+ return delegate_->SamplingFrequency();
+}
+
+int CopyToFileAudioCapturer::NumChannels() const {
+ return delegate_->NumChannels();
+}
+
+bool CopyToFileAudioCapturer::Capture(rtc::BufferT<int16_t>* buffer) {
+ bool result = delegate_->Capture(buffer);
+ if (result) {
+ wav_writer_->WriteSamples(buffer->data(), buffer->size());
+ }
+ return result;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.h b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.h
new file mode 100644
index 0000000000..a410beeea8
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_COPY_TO_FILE_AUDIO_CAPTURER_H_
+#define TEST_TESTSUPPORT_COPY_TO_FILE_AUDIO_CAPTURER_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "common_audio/wav_file.h"
+#include "modules/audio_device/include/test_audio_device.h"
+#include "rtc_base/buffer.h"
+
+namespace webrtc {
+namespace test {
+
+// TestAudioDeviceModule::Capturer that will store audio data, captured by
+// delegate to the specified output file. Can be used to create a copy of
+// generated audio data to be able then to compare it as a reference with
+// audio on the TestAudioDeviceModule::Renderer side.
+class CopyToFileAudioCapturer : public TestAudioDeviceModule::Capturer {
+ public:
+ CopyToFileAudioCapturer(
+ std::unique_ptr<TestAudioDeviceModule::Capturer> delegate,
+ std::string stream_dump_file_name);
+ ~CopyToFileAudioCapturer() override;
+
+ int SamplingFrequency() const override;
+ int NumChannels() const override;
+ bool Capture(rtc::BufferT<int16_t>* buffer) override;
+
+ private:
+ std::unique_ptr<TestAudioDeviceModule::Capturer> delegate_;
+ std::unique_ptr<WavWriter> wav_writer_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_COPY_TO_FILE_AUDIO_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer_unittest.cc b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer_unittest.cc
new file mode 100644
index 0000000000..3831c28580
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/copy_to_file_audio_capturer_unittest.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/copy_to_file_audio_capturer.h"
+
+#include <memory>
+#include <utility>
+
+#include "modules/audio_device/include/test_audio_device.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+namespace webrtc {
+namespace test {
+
+class CopyToFileAudioCapturerTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ temp_filename_ = webrtc::test::TempFilename(
+ webrtc::test::OutputPath(), "copy_to_file_audio_capturer_unittest");
+ std::unique_ptr<TestAudioDeviceModule::Capturer> delegate =
+ TestAudioDeviceModule::CreatePulsedNoiseCapturer(32000, 48000);
+ capturer_ = std::make_unique<CopyToFileAudioCapturer>(std::move(delegate),
+ temp_filename_);
+ }
+
+ void TearDown() override { ASSERT_EQ(remove(temp_filename_.c_str()), 0); }
+
+ std::unique_ptr<CopyToFileAudioCapturer> capturer_;
+ std::string temp_filename_;
+};
+
+TEST_F(CopyToFileAudioCapturerTest, Capture) {
+ rtc::BufferT<int16_t> expected_buffer;
+ ASSERT_TRUE(capturer_->Capture(&expected_buffer));
+ ASSERT_TRUE(!expected_buffer.empty());
+ // Destruct capturer to close wav file.
+ capturer_.reset(nullptr);
+
+ // Read resulted file content with `wav_file_capture` and compare with
+ // what was captured.
+ std::unique_ptr<TestAudioDeviceModule::Capturer> wav_file_capturer =
+ TestAudioDeviceModule::CreateWavFileReader(temp_filename_, 48000);
+ rtc::BufferT<int16_t> actual_buffer;
+ wav_file_capturer->Capture(&actual_buffer);
+ ASSERT_EQ(actual_buffer, expected_buffer);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/file_utils.cc b/third_party/libwebrtc/test/testsupport/file_utils.cc
new file mode 100644
index 0000000000..ff0d5a854c
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/file_utils.cc
@@ -0,0 +1,250 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/file_utils.h"
+
+
+#if defined(WEBRTC_POSIX)
+#include <unistd.h>
+#endif
+
+#if defined(WEBRTC_WIN)
+#include <direct.h>
+#include <tchar.h>
+#include <windows.h>
+
+#include <algorithm>
+#include <codecvt>
+#include <locale>
+
+#include "Shlwapi.h"
+#include "WinDef.h"
+#include "rtc_base/win32.h"
+
+#define GET_CURRENT_DIR _getcwd
+#else
+#include <dirent.h>
+
+#define GET_CURRENT_DIR getcwd
+#endif
+
+#include <sys/stat.h> // To check for directory existence.
+#ifndef S_ISDIR // Not defined in stat.h on Windows.
+#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <memory>
+#include <type_traits>
+#include <utility>
+
+#if defined(WEBRTC_IOS)
+#include "test/testsupport/ios_file_utils.h"
+#elif defined(WEBRTC_MAC)
+#include "test/testsupport/mac_file_utils.h"
+#endif
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/strings/string_builder.h"
+#include "test/testsupport/file_utils_override.h"
+
+namespace webrtc {
+namespace test {
+
+#if defined(WEBRTC_WIN)
+ABSL_CONST_INIT const absl::string_view kPathDelimiter = "\\";
+#else
+ABSL_CONST_INIT const absl::string_view kPathDelimiter = "/";
+#endif
+
+std::string DirName(absl::string_view path) {
+ if (path.empty())
+ return "";
+ if (path == kPathDelimiter)
+ return std::string(path);
+
+ if (path.back() == kPathDelimiter[0])
+ path.remove_suffix(1); // Remove trailing separator.
+
+ return std::string(path.substr(0, path.find_last_of(kPathDelimiter)));
+}
+
+bool FileExists(absl::string_view file_name) {
+ struct stat file_info = {0};
+ return stat(std::string(file_name).c_str(), &file_info) == 0;
+}
+
+bool DirExists(absl::string_view directory_name) {
+ struct stat directory_info = {0};
+ return stat(std::string(directory_name).c_str(), &directory_info) == 0 &&
+ S_ISDIR(directory_info.st_mode);
+}
+
+std::string OutputPath() {
+ return webrtc::test::internal::OutputPath();
+}
+
+std::string WorkingDir() {
+ return webrtc::test::internal::WorkingDir();
+}
+
+// Generate a temporary filename in a safe way.
+// Largely copied from talk/base/{unixfilesystem,win32filesystem}.cc.
+std::string TempFilename(absl::string_view dir, absl::string_view prefix) {
+#ifdef WIN32
+ wchar_t filename[MAX_PATH];
+ if (::GetTempFileNameW(rtc::ToUtf16(dir).c_str(),
+ rtc::ToUtf16(prefix).c_str(), 0, filename) != 0)
+ return rtc::ToUtf8(filename);
+ RTC_DCHECK_NOTREACHED();
+ return "";
+#else
+ rtc::StringBuilder os;
+ os << dir << "/" << prefix << "XXXXXX";
+ std::string tempname = os.Release();
+
+ int fd = ::mkstemp(tempname.data());
+ if (fd == -1) {
+ RTC_DCHECK_NOTREACHED();
+ return "";
+ } else {
+ ::close(fd);
+ }
+ return tempname;
+#endif
+}
+
+std::string GenerateTempFilename(absl::string_view dir,
+ absl::string_view prefix) {
+ std::string filename = TempFilename(dir, prefix);
+ RemoveFile(filename);
+ return filename;
+}
+
+absl::optional<std::vector<std::string>> ReadDirectory(absl::string_view path) {
+ if (path.length() == 0)
+ return absl::optional<std::vector<std::string>>();
+
+ std::string path_str(path);
+
+#if defined(WEBRTC_WIN)
+ // Append separator character if needed.
+ if (path_str.back() != '\\')
+ path_str += '\\';
+
+ // Init.
+ WIN32_FIND_DATAW data;
+ HANDLE handle = ::FindFirstFileW(rtc::ToUtf16(path_str + '*').c_str(), &data);
+ if (handle == INVALID_HANDLE_VALUE)
+ return absl::optional<std::vector<std::string>>();
+
+ // Populate output.
+ std::vector<std::string> found_entries;
+ do {
+ const std::string name = rtc::ToUtf8(data.cFileName);
+ if (name != "." && name != "..")
+ found_entries.emplace_back(path_str + name);
+ } while (::FindNextFileW(handle, &data) == TRUE);
+
+ // Release resources.
+ if (handle != INVALID_HANDLE_VALUE)
+ ::FindClose(handle);
+#else
+ // Append separator character if needed.
+ if (path_str.back() != '/')
+ path_str += '/';
+
+ // Init.
+ DIR* dir = ::opendir(path_str.c_str());
+ if (dir == nullptr)
+ return absl::optional<std::vector<std::string>>();
+
+ // Populate output.
+ std::vector<std::string> found_entries;
+ while (dirent* dirent = readdir(dir)) {
+ const std::string& name = dirent->d_name;
+ if (name != "." && name != "..")
+ found_entries.emplace_back(path_str + name);
+ }
+
+ // Release resources.
+ closedir(dir);
+#endif
+
+ return absl::optional<std::vector<std::string>>(std::move(found_entries));
+}
+
+bool CreateDir(absl::string_view directory_name) {
+ std::string directory_name_str(directory_name);
+ struct stat path_info = {0};
+ // Check if the path exists already:
+ if (stat(directory_name_str.c_str(), &path_info) == 0) {
+ if (!S_ISDIR(path_info.st_mode)) {
+ fprintf(stderr,
+ "Path %s exists but is not a directory! Remove this "
+ "file and re-run to create the directory.\n",
+ directory_name_str.c_str());
+ return false;
+ }
+ } else {
+#ifdef WIN32
+ return _mkdir(directory_name_str.c_str()) == 0;
+#else
+ return mkdir(directory_name_str.c_str(), S_IRWXU | S_IRWXG | S_IRWXO) == 0;
+#endif
+ }
+ return true;
+}
+
+bool RemoveDir(absl::string_view directory_name) {
+#ifdef WIN32
+ return RemoveDirectoryA(std::string(directory_name).c_str()) != FALSE;
+#else
+ return rmdir(std::string(directory_name).c_str()) == 0;
+#endif
+}
+
+bool RemoveFile(absl::string_view file_name) {
+#ifdef WIN32
+ return DeleteFileA(std::string(file_name).c_str()) != FALSE;
+#else
+ return unlink(std::string(file_name).c_str()) == 0;
+#endif
+}
+
+std::string ResourcePath(absl::string_view name, absl::string_view extension) {
+ return webrtc::test::internal::ResourcePath(name, extension);
+}
+
+std::string JoinFilename(absl::string_view dir, absl::string_view name) {
+ RTC_CHECK(!dir.empty()) << "Special cases not implemented.";
+ rtc::StringBuilder os;
+ os << dir << kPathDelimiter << name;
+ return os.Release();
+}
+
+size_t GetFileSize(absl::string_view filename) {
+ FILE* f = fopen(std::string(filename).c_str(), "rb");
+ size_t size = 0;
+ if (f != NULL) {
+ if (fseek(f, 0, SEEK_END) == 0) {
+ size = ftell(f);
+ }
+ fclose(f);
+ }
+ return size;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/file_utils.h b/third_party/libwebrtc/test/testsupport/file_utils.h
new file mode 100644
index 0000000000..ab80ca4454
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/file_utils.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#ifndef TEST_TESTSUPPORT_FILE_UTILS_H_
+#define TEST_TESTSUPPORT_FILE_UTILS_H_
+
+#include <string>
+#include <vector>
+
+#include "absl/base/attributes.h"
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+
+namespace webrtc {
+namespace test {
+
+// Slash or backslash, depending on platform.
+ABSL_CONST_INIT extern const absl::string_view kPathDelimiter;
+
+// Returns the absolute path to the output directory where log files and other
+// test artifacts should be put. The output directory is generally a directory
+// named "out" at the project root. This root is assumed to be two levels above
+// where the test binary is located; this is because tests execute in a dir
+// out/Whatever relative to the project root. This convention is also followed
+// in Chromium.
+//
+// The exception is Android where we use /sdcard/ instead.
+//
+// If symbolic links occur in the path they will be resolved and the actual
+// directory will be returned.
+//
+// Returns the path WITH a trailing path delimiter. If the project root is not
+// found, the current working directory ("./") is returned as a fallback.
+std::string OutputPath();
+
+// Generates an empty file with a unique name in the specified directory and
+// returns the file name and path.
+// TODO(titovartem) rename to TempFile and next method to TempFilename
+std::string TempFilename(absl::string_view dir, absl::string_view prefix);
+
+// Generates a unique file name that can be used for file creation. Doesn't
+// create any files.
+std::string GenerateTempFilename(absl::string_view dir,
+ absl::string_view prefix);
+
+// Returns a path to a resource file in [project-root]/resources/ dir.
+// Returns an absolute path
+//
+// Arguments:
+// name - Name of the resource file. If a plain filename (no directory path)
+// is supplied, the file is assumed to be located in resources/
+// If a directory path is prepended to the filename, a subdirectory
+// hierarchy reflecting that path is assumed to be present.
+// extension - File extension, without the dot, i.e. "bmp" or "yuv".
+std::string ResourcePath(absl::string_view name, absl::string_view extension);
+
+// Joins directory name and file name, separated by the path delimiter.
+std::string JoinFilename(absl::string_view dir, absl::string_view name);
+
+// Gets the current working directory for the executing program.
+// Returns "./" if for some reason it is not possible to find the working
+// directory.
+std::string WorkingDir();
+
+// Reads the content of a directory and, in case of success, returns a vector
+// of strings with one element for each found file or directory. Each element is
+// a path created by prepending `dir` to the file/directory name. "." and ".."
+// are never added in the returned vector.
+absl::optional<std::vector<std::string>> ReadDirectory(absl::string_view path);
+
+// Creates a directory if it not already exists.
+// Returns true if successful. Will print an error message to stderr and return
+// false if a file with the same name already exists.
+bool CreateDir(absl::string_view directory_name);
+
+// Removes a directory, which must already be empty.
+bool RemoveDir(absl::string_view directory_name);
+
+// Removes a file.
+bool RemoveFile(absl::string_view file_name);
+
+// Checks if a file exists.
+// TOOD(alito): Merge these once absl::string_view adoption is complete for this
+// file.
+bool FileExists(absl::string_view file_name);
+
+// Checks if a directory exists.
+bool DirExists(absl::string_view directory_name);
+
+// Strips the rightmost path segment from a path.
+std::string DirName(absl::string_view path);
+
+// File size of the supplied file in bytes. Will return 0 if the file is
+// empty or if the file does not exist/is readable.
+size_t GetFileSize(absl::string_view filename);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_FILE_UTILS_H_
diff --git a/third_party/libwebrtc/test/testsupport/file_utils_override.cc b/third_party/libwebrtc/test/testsupport/file_utils_override.cc
new file mode 100644
index 0000000000..7d0a3e3312
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/file_utils_override.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/file_utils_override.h"
+
+#include <limits.h>
+#include <stdio.h>
+
+#if defined(WEBRTC_WIN)
+#include <direct.h>
+#include <tchar.h>
+#include <windows.h>
+
+#include <algorithm>
+#include <codecvt>
+#include <locale>
+
+#include "Shlwapi.h"
+#include "WinDef.h"
+#include "rtc_base/win32.h"
+
+#define GET_CURRENT_DIR _getcwd
+#else
+#include <unistd.h>
+
+#define GET_CURRENT_DIR getcwd
+#endif
+
+#if defined(WEBRTC_IOS)
+#include "test/testsupport/ios_file_utils.h"
+#endif
+
+#if defined(WEBRTC_MAC)
+#include "test/testsupport/mac_file_utils.h"
+#endif
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+namespace test {
+
+std::string DirName(absl::string_view path);
+bool CreateDir(absl::string_view directory_name);
+
+namespace internal {
+
+namespace {
+#if defined(WEBRTC_WIN)
+const absl::string_view kPathDelimiter = "\\";
+#elif !defined(WEBRTC_IOS)
+const absl::string_view kPathDelimiter = "/";
+#endif
+
+#if defined(WEBRTC_ANDROID)
+// This is a special case in Chrome infrastructure. See
+// base/test/test_support_android.cc.
+const absl::string_view kAndroidChromiumTestsRoot =
+ "/sdcard/chromium_tests_root/";
+#endif
+#if defined(WEBRTC_FUCHSIA)
+const absl::string_view kFuchsiaTestRoot = "/pkg/";
+const absl::string_view kFuchsiaTempWritableDir = "/tmp/";
+#endif
+#if !defined(WEBRTC_IOS)
+const absl::string_view kResourcesDirName = "resources";
+#endif
+
+} // namespace
+
+// Finds the WebRTC src dir.
+// The returned path always ends with a path separator.
+absl::optional<std::string> ProjectRootPath() {
+#if defined(WEBRTC_ANDROID)
+ return std::string(kAndroidChromiumTestsRoot);
+#elif defined WEBRTC_IOS
+ return IOSRootPath();
+#elif defined(WEBRTC_MAC)
+ std::string path;
+ GetNSExecutablePath(&path);
+ std::string exe_dir = DirName(path);
+ // On Mac, tests execute in out/Whatever, so src is two levels up except if
+ // the test is bundled (which our tests are not), in which case it's 5 levels.
+ return DirName(DirName(exe_dir)) + std::string(kPathDelimiter);
+#elif defined(WEBRTC_POSIX)
+// Fuchsia uses POSIX defines as well but does not have full POSIX
+// functionality.
+#if defined(WEBRTC_FUCHSIA)
+ return std::string(kFuchsiaTestRoot);
+#else
+ char buf[PATH_MAX];
+ ssize_t count = ::readlink("/proc/self/exe", buf, arraysize(buf));
+ if (count <= 0) {
+ RTC_DCHECK_NOTREACHED() << "Unable to resolve /proc/self/exe.";
+ return absl::nullopt;
+ }
+ // On POSIX, tests execute in out/Whatever, so src is two levels up.
+ std::string exe_dir = DirName(absl::string_view(buf, count));
+ return DirName(DirName(exe_dir)) + std::string(kPathDelimiter);
+#endif
+#elif defined(WEBRTC_WIN)
+ wchar_t buf[MAX_PATH];
+ buf[0] = 0;
+ if (GetModuleFileNameW(NULL, buf, MAX_PATH) == 0)
+ return absl::nullopt;
+
+ std::string exe_path = rtc::ToUtf8(std::wstring(buf));
+ std::string exe_dir = DirName(exe_path);
+ return DirName(DirName(exe_dir)) + std::string(kPathDelimiter);
+#endif
+}
+
+std::string OutputPath() {
+#if defined(WEBRTC_IOS)
+ return IOSOutputPath();
+#elif defined(WEBRTC_ANDROID)
+ return std::string(kAndroidChromiumTestsRoot);
+#elif defined(WEBRTC_FUCHSIA)
+ return std::string(kFuchsiaTempWritableDir);
+#else
+ absl::optional<std::string> path_opt = ProjectRootPath();
+ RTC_DCHECK(path_opt);
+ std::string path = *path_opt + "out";
+ if (!CreateDir(path)) {
+ return "./";
+ }
+ return path + std::string(kPathDelimiter);
+#endif
+}
+
+std::string WorkingDir() {
+#if defined(WEBRTC_ANDROID)
+ return std::string(kAndroidChromiumTestsRoot);
+#else
+ char path_buffer[FILENAME_MAX];
+ if (!GET_CURRENT_DIR(path_buffer, sizeof(path_buffer))) {
+ fprintf(stderr, "Cannot get current directory!\n");
+ return "./";
+ } else {
+ return std::string(path_buffer);
+ }
+#endif
+}
+
+std::string ResourcePath(absl::string_view name, absl::string_view extension) {
+#if defined(WEBRTC_IOS)
+ return IOSResourcePath(name, extension);
+#else
+ absl::optional<std::string> path_opt = ProjectRootPath();
+ RTC_DCHECK(path_opt);
+ rtc::StringBuilder os(*path_opt);
+ os << kResourcesDirName << kPathDelimiter << name << "." << extension;
+ return os.Release();
+#endif
+}
+
+} // namespace internal
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/file_utils_override.h b/third_party/libwebrtc/test/testsupport/file_utils_override.h
new file mode 100644
index 0000000000..9f119e6d4e
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/file_utils_override.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_FILE_UTILS_OVERRIDE_H_
+#define TEST_TESTSUPPORT_FILE_UTILS_OVERRIDE_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+namespace test {
+namespace internal {
+
+// Returns the absolute path to the output directory where log files and other
+// test artifacts should be put. The output directory is generally a directory
+// named "out" at the project root. This root is assumed to be two levels above
+// where the test binary is located; this is because tests execute in a dir
+// out/Whatever relative to the project root. This convention is also followed
+// in Chromium.
+//
+// The exception is Android where we use /sdcard/ instead.
+//
+// If symbolic links occur in the path they will be resolved and the actual
+// directory will be returned.
+//
+// Returns the path WITH a trailing path delimiter. If the project root is not
+// found, the current working directory ("./") is returned as a fallback.
+std::string OutputPath();
+
+// Gets the current working directory for the executing program.
+// Returns "./" if for some reason it is not possible to find the working
+// directory.
+std::string WorkingDir();
+
+// Returns a full path to a resource file in the resources_dir dir.
+//
+// Arguments:
+// name - Name of the resource file. If a plain filename (no directory path)
+// is supplied, the file is assumed to be located in resources/
+// If a directory path is prepended to the filename, a subdirectory
+// hierarchy reflecting that path is assumed to be present.
+// extension - File extension, without the dot, i.e. "bmp" or "yuv".
+std::string ResourcePath(absl::string_view name, absl::string_view extension);
+
+} // namespace internal
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_FILE_UTILS_OVERRIDE_H_
diff --git a/third_party/libwebrtc/test/testsupport/file_utils_unittest.cc b/third_party/libwebrtc/test/testsupport/file_utils_unittest.cc
new file mode 100644
index 0000000000..b9de01d09d
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/file_utils_unittest.cc
@@ -0,0 +1,277 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/file_utils.h"
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <fstream>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "rtc_base/checks.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+#ifdef WIN32
+#define chdir _chdir
+#endif
+
+using ::testing::EndsWith;
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+std::string Path(absl::string_view path) {
+ std::string result(path);
+ std::replace(result.begin(), result.end(), '/', kPathDelimiter[0]);
+ return result;
+}
+
+// Remove files and directories in a directory non-recursively and writes the
+// number of deleted items in `num_deleted_entries`.
+void CleanDir(absl::string_view dir, size_t* num_deleted_entries) {
+ RTC_DCHECK(num_deleted_entries);
+ *num_deleted_entries = 0;
+ absl::optional<std::vector<std::string>> dir_content = ReadDirectory(dir);
+ EXPECT_TRUE(dir_content);
+ for (const auto& entry : *dir_content) {
+ if (DirExists(entry)) {
+ EXPECT_TRUE(RemoveDir(entry));
+ (*num_deleted_entries)++;
+ } else if (FileExists(entry)) {
+ EXPECT_TRUE(RemoveFile(entry));
+ (*num_deleted_entries)++;
+ } else {
+ FAIL();
+ }
+ }
+}
+
+void WriteStringInFile(absl::string_view what, absl::string_view file_path) {
+ std::ofstream out(std::string{file_path});
+ out << what;
+ out.close();
+}
+
+} // namespace
+
+// Test fixture to restore the working directory between each test, since some
+// of them change it with chdir during execution (not restored by the
+// gtest framework).
+class FileUtilsTest : public ::testing::Test {
+ protected:
+ FileUtilsTest() {}
+ ~FileUtilsTest() override {}
+ // Runs before the first test
+ static void SetUpTestSuite() {
+ original_working_dir_ = webrtc::test::WorkingDir();
+ }
+ void SetUp() override { ASSERT_EQ(chdir(original_working_dir_.c_str()), 0); }
+ void TearDown() override {
+ ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
+ }
+
+ private:
+ static std::string original_working_dir_;
+};
+
+std::string FileUtilsTest::original_working_dir_ = "";
+
+// The location will vary depending on where the webrtc checkout is on the
+// system, but it should end as described above and be an absolute path.
+std::string ExpectedRootDirByPlatform() {
+#if defined(WEBRTC_ANDROID)
+ return Path("chromium_tests_root/");
+#elif defined(WEBRTC_IOS)
+ return Path("tmp/");
+#else
+ return Path("out/");
+#endif
+}
+
+TEST_F(FileUtilsTest, OutputPathFromUnchangedWorkingDir) {
+ std::string expected_end = ExpectedRootDirByPlatform();
+ std::string result = webrtc::test::OutputPath();
+
+ ASSERT_THAT(result, EndsWith(expected_end));
+}
+
+// Tests with current working directory set to a directory higher up in the
+// directory tree than the project root dir.
+TEST_F(FileUtilsTest, OutputPathFromRootWorkingDir) {
+ ASSERT_EQ(0, chdir(kPathDelimiter.data()));
+
+ std::string expected_end = ExpectedRootDirByPlatform();
+ std::string result = webrtc::test::OutputPath();
+
+ ASSERT_THAT(result, EndsWith(expected_end));
+}
+
+TEST_F(FileUtilsTest, TempFilename) {
+ std::string temp_filename = webrtc::test::TempFilename(
+ webrtc::test::OutputPath(), "TempFilenameTest");
+ ASSERT_TRUE(webrtc::test::FileExists(temp_filename))
+ << "Couldn't find file: " << temp_filename;
+ remove(temp_filename.c_str());
+}
+
+TEST_F(FileUtilsTest, GenerateTempFilename) {
+ std::string temp_filename = webrtc::test::GenerateTempFilename(
+ webrtc::test::OutputPath(), "TempFilenameTest");
+ ASSERT_FALSE(webrtc::test::FileExists(temp_filename))
+ << "File exists: " << temp_filename;
+ FILE* file = fopen(temp_filename.c_str(), "wb");
+ ASSERT_TRUE(file != NULL) << "Failed to open file: " << temp_filename;
+ ASSERT_GT(fprintf(file, "%s", "Dummy data"), 0)
+ << "Failed to write to file: " << temp_filename;
+ fclose(file);
+ remove(temp_filename.c_str());
+}
+
+// Only tests that the code executes
+#if defined(WEBRTC_IOS)
+#define MAYBE_CreateDir DISABLED_CreateDir
+#else
+#define MAYBE_CreateDir CreateDir
+#endif
+TEST_F(FileUtilsTest, MAYBE_CreateDir) {
+ std::string directory = "fileutils-unittest-empty-dir";
+ // Make sure it's removed if a previous test has failed:
+ remove(directory.c_str());
+ ASSERT_TRUE(webrtc::test::CreateDir(directory));
+ remove(directory.c_str());
+}
+
+TEST_F(FileUtilsTest, WorkingDirReturnsValue) {
+ // This will obviously be different depending on where the webrtc checkout is,
+ // so just check something is returned.
+ std::string working_dir = webrtc::test::WorkingDir();
+ ASSERT_GT(working_dir.length(), 0u);
+}
+
+TEST_F(FileUtilsTest, ResourcePathReturnsCorrectPath) {
+ std::string result = webrtc::test::ResourcePath(
+ Path("video_coding/frame-ethernet-ii"), "pcap");
+#if defined(WEBRTC_IOS)
+ // iOS bundles resources straight into the bundle root.
+ std::string expected_end = Path("/frame-ethernet-ii.pcap");
+#else
+ // Other platforms: it's a separate dir.
+ std::string expected_end =
+ Path("resources/video_coding/frame-ethernet-ii.pcap");
+#endif
+
+ ASSERT_THAT(result, EndsWith(expected_end));
+ ASSERT_TRUE(FileExists(result)) << "Expected " << result
+ << " to exist; did "
+ "ResourcePath return an incorrect path?";
+}
+
+TEST_F(FileUtilsTest, ResourcePathFromRootWorkingDir) {
+ ASSERT_EQ(0, chdir(kPathDelimiter.data()));
+ std::string resource = webrtc::test::ResourcePath("whatever", "ext");
+#if !defined(WEBRTC_IOS)
+ ASSERT_NE(resource.find("resources"), std::string::npos);
+#endif
+ ASSERT_GT(resource.find("whatever"), 0u);
+ ASSERT_GT(resource.find("ext"), 0u);
+}
+
+TEST_F(FileUtilsTest, GetFileSizeExistingFile) {
+ // Create a file with some dummy data in.
+ std::string temp_filename = webrtc::test::TempFilename(
+ webrtc::test::OutputPath(), "fileutils_unittest");
+ FILE* file = fopen(temp_filename.c_str(), "wb");
+ ASSERT_TRUE(file != NULL) << "Failed to open file: " << temp_filename;
+ ASSERT_GT(fprintf(file, "%s", "Dummy data"), 0)
+ << "Failed to write to file: " << temp_filename;
+ fclose(file);
+ ASSERT_GT(webrtc::test::GetFileSize(temp_filename), 0u);
+ remove(temp_filename.c_str());
+}
+
+TEST_F(FileUtilsTest, GetFileSizeNonExistingFile) {
+ ASSERT_EQ(0u, webrtc::test::GetFileSize("non-existing-file.tmp"));
+}
+
+TEST_F(FileUtilsTest, DirExists) {
+ // Check that an existing directory is recognized as such.
+ ASSERT_TRUE(webrtc::test::DirExists(webrtc::test::OutputPath()))
+ << "Existing directory not found";
+
+ // Check that a non-existing directory is recognized as such.
+ std::string directory = "direxists-unittest-non_existing-dir";
+ ASSERT_FALSE(webrtc::test::DirExists(directory))
+ << "Non-existing directory found";
+
+ // Check that an existing file is not recognized as an existing directory.
+ std::string temp_filename = webrtc::test::TempFilename(
+ webrtc::test::OutputPath(), "TempFilenameTest");
+ ASSERT_TRUE(webrtc::test::FileExists(temp_filename))
+ << "Couldn't find file: " << temp_filename;
+ ASSERT_FALSE(webrtc::test::DirExists(temp_filename))
+ << "Existing file recognized as existing directory";
+ remove(temp_filename.c_str());
+}
+
+TEST_F(FileUtilsTest, WriteReadDeleteFilesAndDirs) {
+ size_t num_deleted_entries;
+
+ // Create an empty temporary directory for this test.
+ const std::string temp_directory =
+ OutputPath() + Path("TempFileUtilsTestReadDirectory/");
+ CreateDir(temp_directory);
+ EXPECT_NO_FATAL_FAILURE(CleanDir(temp_directory, &num_deleted_entries));
+ EXPECT_TRUE(DirExists(temp_directory));
+
+ // Add a file.
+ const std::string temp_filename = temp_directory + "TempFilenameTest";
+ WriteStringInFile("test\n", temp_filename);
+ EXPECT_TRUE(FileExists(temp_filename));
+
+ // Add an empty directory.
+ const std::string temp_subdir = temp_directory + Path("subdir/");
+ EXPECT_TRUE(CreateDir(temp_subdir));
+ EXPECT_TRUE(DirExists(temp_subdir));
+
+ // Checks.
+ absl::optional<std::vector<std::string>> dir_content =
+ ReadDirectory(temp_directory);
+ EXPECT_TRUE(dir_content);
+ EXPECT_EQ(2u, dir_content->size());
+ EXPECT_NO_FATAL_FAILURE(CleanDir(temp_directory, &num_deleted_entries));
+ EXPECT_EQ(2u, num_deleted_entries);
+ EXPECT_TRUE(RemoveDir(temp_directory));
+ EXPECT_FALSE(DirExists(temp_directory));
+}
+
+TEST_F(FileUtilsTest, DirNameStripsFilename) {
+ EXPECT_EQ(Path("/some/path"), DirName(Path("/some/path/file.txt")));
+}
+
+TEST_F(FileUtilsTest, DirNameKeepsStrippingRightmostPathComponent) {
+ EXPECT_EQ(Path("/some"), DirName(DirName(Path("/some/path/file.txt"))));
+}
+
+TEST_F(FileUtilsTest, DirNameDoesntCareIfAPathEndsInPathSeparator) {
+ EXPECT_EQ(Path("/some"), DirName(Path("/some/path/")));
+}
+
+TEST_F(FileUtilsTest, DirNameStopsAtRoot) {
+ EXPECT_EQ(Path("/"), DirName(Path("/")));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.cc b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.cc
new file mode 100644
index 0000000000..531dade0e8
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/fixed_fps_video_frame_writer_adapter.h"
+
+#include <cmath>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/units/time_delta.h"
+#include "api/video/video_sink_interface.h"
+#include "rtc_base/checks.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr TimeDelta kOneSecond = TimeDelta::Seconds(1);
+
+} // namespace
+
+FixedFpsVideoFrameWriterAdapter::FixedFpsVideoFrameWriterAdapter(
+ int fps,
+ Clock* clock,
+ std::unique_ptr<VideoFrameWriter> delegate)
+ : inter_frame_interval_(kOneSecond / fps),
+ clock_(clock),
+ delegate_(std::move(delegate)) {}
+
+FixedFpsVideoFrameWriterAdapter::~FixedFpsVideoFrameWriterAdapter() {
+ Close();
+}
+
+void FixedFpsVideoFrameWriterAdapter::Close() {
+ if (is_closed_) {
+ return;
+ }
+ is_closed_ = true;
+ if (!last_frame_.has_value()) {
+ return;
+ }
+ Timestamp now = Now();
+ RTC_CHECK(WriteMissedSlotsExceptLast(now));
+ RTC_CHECK(delegate_->WriteFrame(*last_frame_));
+ delegate_->Close();
+}
+
+bool FixedFpsVideoFrameWriterAdapter::WriteFrame(const VideoFrame& frame) {
+  RTC_CHECK(!is_closed_);
+  Timestamp now = Now();
+  if (!last_frame_.has_value()) {
+    RTC_CHECK(!last_frame_time_.IsFinite());
+    last_frame_ = frame;
+    last_frame_time_ = now;
+    return true;
+  }
+
+  RTC_CHECK(last_frame_time_.IsFinite());
+
+  if (last_frame_time_ > now) {
+    // New frame was received before the expected time "slot" for the current
+    // `last_frame_` arrived => just replace the current `last_frame_` with
+    // the received `frame`.
+    RTC_CHECK_LE(last_frame_time_ - now, inter_frame_interval_ / 2);
+    last_frame_ = frame;
+    return true;
+  }
+
+  if (!WriteMissedSlotsExceptLast(now)) {
+    return false;
+  }
+
+  if (now - last_frame_time_ < inter_frame_interval_ / 2) {
+    // New frame was received closer to the expected time "slot" of the
+    // current `last_frame_` than to the next "slot" => just replace the
+    // current `last_frame_` with the received `frame`.
+    last_frame_ = frame;
+    return true;
+  }
+
+  if (!delegate_->WriteFrame(*last_frame_)) {
+    return false;
+  }
+  last_frame_ = frame;
+  last_frame_time_ = last_frame_time_ + inter_frame_interval_;
+  return true;
+}
+
+bool FixedFpsVideoFrameWriterAdapter::WriteMissedSlotsExceptLast(
+ Timestamp now) {
+ RTC_CHECK(last_frame_time_.IsFinite());
+ while (now - last_frame_time_ > inter_frame_interval_) {
+ if (!delegate_->WriteFrame(*last_frame_)) {
+ return false;
+ }
+ last_frame_time_ = last_frame_time_ + inter_frame_interval_;
+ }
+ return true;
+}
+
+Timestamp FixedFpsVideoFrameWriterAdapter::Now() const {
+ return clock_->CurrentTime();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.h b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.h
new file mode 100644
index 0000000000..d4d95e9f82
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_FIXED_FPS_VIDEO_FRAME_WRITER_ADAPTER_H_
+#define TEST_TESTSUPPORT_FIXED_FPS_VIDEO_FRAME_WRITER_ADAPTER_H_
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/video/video_sink_interface.h"
+#include "system_wrappers/include/clock.h"
+#include "test/testsupport/video_frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+// Writes video to the specified video writer with specified fixed frame rate.
+// If at the point in time X no new frames are passed to the writer, the
+// previous frame is used to fill the gap and preserve frame rate.
+//
+// This adapter uses the following algorithm:
+// There are output "slots" at a fixed frame rate (starting at the time of the
+// first received frame). Each incoming frame is assigned to the closest output
+// slot. Then empty slots are filled by repeating the closest filled slot before
+// the empty one. If there are multiple frames closest to the same slot, the
+// latest received one is used.
+//
+// Frames are output for the whole lifetime of this object after the first
+// frame has been written, or until Close() is called.
+//
+// For example if frames from A to F were received, then the following output
+// sequence will be generated:
+// Received frames: A B C D EF Destructor called
+// | | | | || |
+// v v v v vv v
+// X----X----X----X----X----X----X----X----X----+----+--
+// | | | | | | | | |
+// Produced frames: A A A B C C F F F
+//
+// This class is not thread safe.
+class FixedFpsVideoFrameWriterAdapter : public VideoFrameWriter {
+ public:
+  FixedFpsVideoFrameWriterAdapter(int fps,
+                                  Clock* clock,
+                                  std::unique_ptr<VideoFrameWriter> delegate);
+  ~FixedFpsVideoFrameWriterAdapter() override;
+
+  bool WriteFrame(const webrtc::VideoFrame& frame) override;
+
+  // Closes the adapter and the underlying delegate. The user must not call
+  // WriteFrame after calling this method.
+  void Close() override;
+
+ private:
+  // Writes `last_frame_` for each "slot" from `last_frame_time_` up to now
+  // excluding the last one.
+  // Updates `last_frame_time_` to the position of the last NOT WRITTEN frame.
+  // Returns true if all writes were successful, otherwise returns false. In
+  // that case it is not guaranteed how many frames were actually written.
+  bool WriteMissedSlotsExceptLast(Timestamp now);
+  Timestamp Now() const;
+
+  // Because `TimeDelta` stores time with microsecond precision,
+  // `last_frame_time_` may accumulate a small drift; for very long streams
+  // this should be changed to track time as a double.
+  const TimeDelta inter_frame_interval_;
+  Clock* const clock_;
+  std::unique_ptr<VideoFrameWriter> delegate_;
+  bool is_closed_ = false;
+
+  // Expected time slot for the last frame.
+  Timestamp last_frame_time_ = Timestamp::MinusInfinity();
+  absl::optional<VideoFrame> last_frame_ = absl::nullopt;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_FIXED_FPS_VIDEO_FRAME_WRITER_ADAPTER_H_
diff --git a/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter_test.cc b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter_test.cc
new file mode 100644
index 0000000000..5ee4701cc9
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/fixed_fps_video_frame_writer_adapter_test.cc
@@ -0,0 +1,320 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/fixed_fps_video_frame_writer_adapter.h"
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/video_frame_writer.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr TimeDelta kOneSecond = TimeDelta::Seconds(1);
+
+using ::testing::ElementsAre;
+
+class InMemoryVideoWriter : public VideoFrameWriter {
+ public:
+ ~InMemoryVideoWriter() override = default;
+
+ bool WriteFrame(const webrtc::VideoFrame& frame) override {
+ MutexLock lock(&mutex_);
+ received_frames_.push_back(frame);
+ return true;
+ }
+
+ void Close() override {}
+
+ std::vector<VideoFrame> received_frames() const {
+ MutexLock lock(&mutex_);
+ return received_frames_;
+ }
+
+ private:
+ mutable Mutex mutex_;
+ std::vector<VideoFrame> received_frames_ RTC_GUARDED_BY(mutex_);
+};
+
+VideoFrame EmptyFrameWithId(uint16_t frame_id) {
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(1, 1))
+ .set_id(frame_id)
+ .build();
+}
+
+std::vector<uint16_t> FrameIds(const std::vector<VideoFrame>& frames) {
+ std::vector<uint16_t> out;
+ for (const VideoFrame& frame : frames) {
+ out.push_back(frame.id());
+ }
+ return out;
+}
+
+std::unique_ptr<TimeController> CreateSimulatedTimeController() {
+ // Using an offset of 100000 to get nice fixed width and readable
+ // timestamps in typical test scenarios.
+ const Timestamp kSimulatedStartTime = Timestamp::Seconds(100000);
+ return std::make_unique<GlobalSimulatedTimeController>(kSimulatedStartTime);
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ WhenWrittenWithSameFpsVideoIsCorrect) {
+ auto time_controller = CreateSimulatedTimeController();
+ int fps = 25;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(fps, time_controller->GetClock(),
+ std::move(inmemory_writer));
+
+ for (int i = 1; i <= 30; ++i) {
+ video_writer.WriteFrame(EmptyFrameWithId(i));
+ time_controller->AdvanceTime(kOneSecond / fps);
+ }
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(
+ FrameIds(received_frames),
+ ElementsAre(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest, FrameIsRepeatedWhenThereIsAFreeze) {
+ auto time_controller = CreateSimulatedTimeController();
+ int fps = 25;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(fps, time_controller->GetClock(),
+ std::move(inmemory_writer));
+
+ // Write 10 frames
+ for (int i = 1; i <= 10; ++i) {
+ video_writer.WriteFrame(EmptyFrameWithId(i));
+ time_controller->AdvanceTime(kOneSecond / fps);
+ }
+
+ // Freeze for 4 frames
+ time_controller->AdvanceTime(4 * kOneSecond / fps);
+
+ // Write 10 more frames
+ for (int i = 11; i <= 20; ++i) {
+ video_writer.WriteFrame(EmptyFrameWithId(i));
+ time_controller->AdvanceTime(kOneSecond / fps);
+ }
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames),
+ ElementsAre(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest, NoFramesWritten) {
+ auto time_controller = CreateSimulatedTimeController();
+ int fps = 25;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(fps, time_controller->GetClock(),
+ std::move(inmemory_writer));
+ time_controller->AdvanceTime(TimeDelta::Millis(100));
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ ASSERT_TRUE(received_frames.empty());
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ FreezeInTheMiddleAndNewFrameReceivedBeforeMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(2.3 * kInterval);
+ video_writer.WriteFrame(EmptyFrameWithId(2));
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1, 1, 2));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ FreezeInTheMiddleAndNewFrameReceivedAfterMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(2.5 * kInterval);
+ video_writer.WriteFrame(EmptyFrameWithId(2));
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1, 1, 1, 2));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ NewFrameReceivedBeforeMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(0.3 * kInterval);
+ video_writer.WriteFrame(EmptyFrameWithId(2));
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(2));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ NewFrameReceivedAfterMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(0.5 * kInterval);
+ video_writer.WriteFrame(EmptyFrameWithId(2));
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1, 2));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ FreeezeAtTheEndAndDestroyBeforeMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(2.3 * kInterval);
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1, 1, 1));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ FreeezeAtTheEndAndDestroyAfterMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(2.5 * kInterval);
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1, 1, 1));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ DestroyBeforeMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(0.3 * kInterval);
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1));
+}
+
+TEST(FixedFpsVideoFrameWriterAdapterTest,
+ DestroyAfterMiddleOfExpectedInterval) {
+ auto time_controller = CreateSimulatedTimeController();
+ constexpr int kFps = 10;
+ constexpr TimeDelta kInterval = kOneSecond / kFps;
+
+ auto inmemory_writer = std::make_unique<InMemoryVideoWriter>();
+ InMemoryVideoWriter* inmemory_writer_ref = inmemory_writer.get();
+
+ FixedFpsVideoFrameWriterAdapter video_writer(
+ kFps, time_controller->GetClock(), std::move(inmemory_writer));
+ video_writer.WriteFrame(EmptyFrameWithId(1));
+ time_controller->AdvanceTime(0.5 * kInterval);
+ video_writer.Close();
+
+ std::vector<VideoFrame> received_frames =
+ inmemory_writer_ref->received_frames();
+ EXPECT_THAT(FrameIds(received_frames), ElementsAre(1));
+}
+
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/frame_reader.h b/third_party/libwebrtc/test/testsupport/frame_reader.h
new file mode 100644
index 0000000000..7856476ca0
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/frame_reader.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_FRAME_READER_H_
+#define TEST_TESTSUPPORT_FRAME_READER_H_
+
+#include <stdio.h>
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/video/resolution.h"
+
+namespace webrtc {
+class I420Buffer;
+namespace test {
+
+// Handles reading of I420 frames from video files.
+class FrameReader {
+ public:
+ struct Ratio {
+ int num = 1;
+ int den = 1;
+ };
+
+ static constexpr Ratio kNoScale = Ratio({.num = 1, .den = 1});
+
+ virtual ~FrameReader() {}
+
+ // Reads and returns next frame. Returns `nullptr` if reading failed or end of
+ // stream is reached.
+ virtual rtc::scoped_refptr<I420Buffer> PullFrame() = 0;
+
+ // Reads and returns next frame. `frame_num` stores unwrapped frame number
+ // which can be passed to `ReadFrame` to re-read this frame later. Returns
+ // `nullptr` if reading failed or end of stream is reached.
+ virtual rtc::scoped_refptr<I420Buffer> PullFrame(int* frame_num) = 0;
+
+ // Reads and returns frame specified by `frame_num`. Returns `nullptr` if
+ // reading failed.
+ virtual rtc::scoped_refptr<I420Buffer> ReadFrame(int frame_num) = 0;
+
+ // Reads next frame, resizes and returns it. `frame_num` stores unwrapped
+ // frame number which can be passed to `ReadFrame` to re-read this frame
+ // later. `resolution` specifies resolution of the returned frame.
+ // `framerate_scale` specifies frame rate scale factor. Frame rate scaling is
+ // done by skipping or repeating frames.
+ virtual rtc::scoped_refptr<I420Buffer> PullFrame(int* frame_num,
+ Resolution resolution,
+ Ratio framerate_scale) = 0;
+
+ // Reads frame specified by `frame_num`, resizes and returns it. Returns
+ // `nullptr` if reading failed.
+ virtual rtc::scoped_refptr<I420Buffer> ReadFrame(int frame_num,
+ Resolution resolution) = 0;
+
+ // Total number of retrievable frames.
+ virtual int num_frames() const = 0;
+};
+
+class YuvFrameReaderImpl : public FrameReader {
+ public:
+ enum class RepeatMode { kSingle, kRepeat, kPingPong };
+
+ // Creates the frame reader for a YUV file specified by `filepath`.
+ // `resolution` specifies width and height of frames in pixels. `repeat_mode`
+  // specifies behaviour of the reader on reaching the end of file (stop, read
+ // it over from the beginning or read in reverse order). The file is assumed
+ // to exist, be readable and to contain at least 1 frame.
+ YuvFrameReaderImpl(std::string filepath,
+ Resolution resolution,
+ RepeatMode repeat_mode);
+
+ ~YuvFrameReaderImpl() override;
+
+ virtual void Init();
+
+ rtc::scoped_refptr<I420Buffer> PullFrame() override;
+
+ rtc::scoped_refptr<I420Buffer> PullFrame(int* frame_num) override;
+
+ rtc::scoped_refptr<I420Buffer> PullFrame(int* frame_num,
+ Resolution resolution,
+ Ratio framerate_scale) override;
+
+ rtc::scoped_refptr<I420Buffer> ReadFrame(int frame_num) override;
+
+ rtc::scoped_refptr<I420Buffer> ReadFrame(int frame_num,
+ Resolution resolution) override;
+
+ int num_frames() const override { return num_frames_; }
+
+ protected:
+ class RateScaler {
+ public:
+ int Skip(Ratio framerate_scale);
+
+ private:
+ absl::optional<int> ticks_;
+ };
+
+ const std::string filepath_;
+ Resolution resolution_;
+ const RepeatMode repeat_mode_;
+ int num_frames_;
+ int frame_num_;
+ int frame_size_bytes_;
+ int header_size_bytes_;
+ FILE* file_;
+ RateScaler framerate_scaler_;
+};
+
+class Y4mFrameReaderImpl : public YuvFrameReaderImpl {
+ public:
+ // Creates the frame reader for a Y4M file specified by `filepath`.
+  // `repeat_mode` specifies behaviour of the reader on reaching the end of file
+ // (stop, read it over from the beginning or read in reverse order). The file
+ // is assumed to exist, be readable and to contain at least 1 frame.
+ Y4mFrameReaderImpl(std::string filepath, RepeatMode repeat_mode);
+
+ void Init() override;
+};
+
+std::unique_ptr<FrameReader> CreateYuvFrameReader(std::string filepath,
+ Resolution resolution);
+
+std::unique_ptr<FrameReader> CreateYuvFrameReader(
+ std::string filepath,
+ Resolution resolution,
+ YuvFrameReaderImpl::RepeatMode repeat_mode);
+
+std::unique_ptr<FrameReader> CreateY4mFrameReader(std::string filepath);
+
+std::unique_ptr<FrameReader> CreateY4mFrameReader(
+ std::string filepath,
+ YuvFrameReaderImpl::RepeatMode repeat_mode);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_FRAME_READER_H_
diff --git a/third_party/libwebrtc/test/testsupport/frame_writer.h b/third_party/libwebrtc/test/testsupport/frame_writer.h
new file mode 100644
index 0000000000..5f85d8bcd4
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/frame_writer.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_FRAME_WRITER_H_
+#define TEST_TESTSUPPORT_FRAME_WRITER_H_
+
+#include <stdio.h>
+
+#include <string>
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+namespace test {
+
+// Handles writing of video files.
+class FrameWriter {
+ public:
+ virtual ~FrameWriter() {}
+
+  // Initializes the file handler, i.e. opens the output file etc.
+  // This must be called before writing frames has started.
+ // Returns false if an error has occurred, in addition to printing to stderr.
+ virtual bool Init() = 0;
+
+ // Writes a frame of the configured frame length to the output file.
+ // Returns true if the write was successful, false otherwise.
+ virtual bool WriteFrame(const uint8_t* frame_buffer) = 0;
+
+ // Closes the output file if open. Essentially makes this class impossible
+ // to use anymore. Will also be invoked by the destructor.
+ virtual void Close() = 0;
+
+ // Frame length in bytes of a single frame image.
+ virtual size_t FrameLength() = 0;
+};
+
+// Writes raw I420 frames in sequence.
+class YuvFrameWriterImpl : public FrameWriter {
+ public:
+  // Creates a file handler. The output file will be overwritten if it already
+  // exists and must be writable.
+ // Parameters:
+ // output_filename The file to write. Will be overwritten if already
+ // existing.
+ // width, height Size of each frame to read.
+ YuvFrameWriterImpl(std::string output_filename, int width, int height);
+ ~YuvFrameWriterImpl() override;
+ bool Init() override;
+ bool WriteFrame(const uint8_t* frame_buffer) override;
+ void Close() override;
+ size_t FrameLength() override;
+
+ protected:
+ const std::string output_filename_;
+ size_t frame_length_in_bytes_;
+ const int width_;
+ const int height_;
+ FILE* output_file_;
+};
+
+// Writes raw I420 frames in sequence, but with Y4M file and frame headers for
+// more convenient playback in external media players.
+class Y4mFrameWriterImpl : public YuvFrameWriterImpl {
+ public:
+ Y4mFrameWriterImpl(std::string output_filename,
+ int width,
+ int height,
+ int frame_rate);
+ ~Y4mFrameWriterImpl() override;
+ bool Init() override;
+ bool WriteFrame(const uint8_t* frame_buffer) override;
+
+ private:
+ const int frame_rate_;
+};
+
+// LibJpeg is not available on iOS. This class will do nothing on iOS.
+class JpegFrameWriter {
+ public:
+ JpegFrameWriter(const std::string& output_filename);
+ // Quality can be from 0 (worst) to 100 (best). Best quality is still lossy.
+ // WriteFrame can be called only once. Subsequent calls will fail.
+ bool WriteFrame(const VideoFrame& input_frame, int quality);
+
+#if !defined(WEBRTC_IOS)
+ private:
+ bool frame_written_;
+ const std::string output_filename_;
+ FILE* output_file_;
+#endif
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_FRAME_WRITER_H_
diff --git a/third_party/libwebrtc/test/testsupport/ios_file_utils.h b/third_party/libwebrtc/test/testsupport/ios_file_utils.h
new file mode 100644
index 0000000000..49df3b9010
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/ios_file_utils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_IOS_FILE_UTILS_H_
+#define TEST_TESTSUPPORT_IOS_FILE_UTILS_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+
+namespace webrtc {
+namespace test {
+
+std::string IOSOutputPath();
+std::string IOSRootPath();
+std::string IOSResourcePath(absl::string_view name,
+ absl::string_view extension);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_IOS_FILE_UTILS_H_
diff --git a/third_party/libwebrtc/test/testsupport/ios_file_utils.mm b/third_party/libwebrtc/test/testsupport/ios_file_utils.mm
new file mode 100644
index 0000000000..ef36937e6a
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/ios_file_utils.mm
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WEBRTC_IOS)
+
+#import <Foundation/Foundation.h>
+#include <string.h>
+
+#import "sdk/objc/helpers/NSString+StdString.h"
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+// For iOS, resource files are added to the application bundle in the root
+// and not in separate folders as is the case for other platforms. This method
+// therefore removes any prepended folders and uses only the actual file name.
+std::string IOSResourcePath(absl::string_view name, absl::string_view extension) {
+ @autoreleasepool {
+ NSString* path = [NSString stringForAbslStringView:name];
+ NSString* fileName = path.lastPathComponent;
+ NSString* fileType = [NSString stringForAbslStringView:extension];
+ // Get full pathname for the resource identified by the name and extension.
+ NSString* pathString = [[NSBundle mainBundle] pathForResource:fileName
+ ofType:fileType];
+ return [NSString stdStringForString:pathString];
+ }
+}
+
+std::string IOSRootPath() {
+ @autoreleasepool {
+ NSBundle* mainBundle = [NSBundle mainBundle];
+ return [NSString stdStringForString:mainBundle.bundlePath] + "/";
+ }
+}
+
+// For iOS, we don't have access to the output directory. Return the path to the
+// temporary directory instead. This is mostly used by tests that need to write
+// output files to disk.
+std::string IOSOutputPath() {
+ @autoreleasepool {
+ NSString* tempDir = NSTemporaryDirectory();
+ if (tempDir == nil)
+ tempDir = @"/tmp";
+ return [NSString stdStringForString:tempDir];
+ }
+}
+
+} // namespace test
+} // namespace webrtc
+
+#endif // defined(WEBRTC_IOS)
diff --git a/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc
new file mode 100644
index 0000000000..92700e192f
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/ivf_video_frame_generator.h"
+
+#include <limits>
+
+#include "api/video/encoded_image.h"
+#include "api/video/i420_buffer.h"
+#include "api/video_codecs/video_codec.h"
+#include "media/base/media_constants.h"
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/file_wrapper.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr TimeDelta kMaxNextFrameWaitTimeout = TimeDelta::Seconds(1);
+
+} // namespace
+
+IvfVideoFrameGenerator::IvfVideoFrameGenerator(const std::string& file_name)
+ : callback_(this),
+ file_reader_(IvfFileReader::Create(FileWrapper::OpenReadOnly(file_name))),
+ video_decoder_(CreateVideoDecoder(file_reader_->GetVideoCodecType())),
+ width_(file_reader_->GetFrameWidth()),
+ height_(file_reader_->GetFrameHeight()) {
+ RTC_CHECK(video_decoder_) << "No decoder found for file's video codec type";
+ VideoDecoder::Settings decoder_settings;
+ decoder_settings.set_codec_type(file_reader_->GetVideoCodecType());
+ decoder_settings.set_max_render_resolution(
+ {file_reader_->GetFrameWidth(), file_reader_->GetFrameHeight()});
+  // Set the buffer pool size to the max value to ensure that if users of the
+  // generator, e.g. test frameworks, retain frames for quite a long time, the
+  // decoder won't crash with a buffer pool overflow error.
+ decoder_settings.set_buffer_pool_size(std::numeric_limits<int>::max());
+ RTC_CHECK_EQ(video_decoder_->RegisterDecodeCompleteCallback(&callback_),
+ WEBRTC_VIDEO_CODEC_OK);
+ RTC_CHECK(video_decoder_->Configure(decoder_settings));
+}
+IvfVideoFrameGenerator::~IvfVideoFrameGenerator() {
+ MutexLock lock(&lock_);
+ if (!file_reader_) {
+ return;
+ }
+ file_reader_->Close();
+ file_reader_.reset();
+ // Reset decoder to prevent it from async access to `this`.
+ video_decoder_.reset();
+ {
+ MutexLock frame_lock(&frame_decode_lock_);
+ next_frame_ = absl::nullopt;
+ // Set event in case another thread is waiting on it.
+ next_frame_decoded_.Set();
+ }
+}
+
+FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() {
+ MutexLock lock(&lock_);
+ next_frame_decoded_.Reset();
+ RTC_CHECK(file_reader_);
+ if (!file_reader_->HasMoreFrames()) {
+ file_reader_->Reset();
+ }
+ absl::optional<EncodedImage> image = file_reader_->NextFrame();
+ RTC_CHECK(image);
+  // The last parameter (render_time_ms) is undocumented and appears unused.
+ RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK,
+ video_decoder_->Decode(*image, /*missing_frames=*/false,
+ /*render_time_ms=*/0));
+ bool decoded = next_frame_decoded_.Wait(kMaxNextFrameWaitTimeout);
+ RTC_CHECK(decoded) << "Failed to decode next frame in "
+ << kMaxNextFrameWaitTimeout << ". Can't continue";
+
+ MutexLock frame_lock(&frame_decode_lock_);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ next_frame_->video_frame_buffer();
+ if (width_ != static_cast<size_t>(buffer->width()) ||
+ height_ != static_cast<size_t>(buffer->height())) {
+ // Video adapter has requested a down-scale. Allocate a new buffer and
+ // return scaled version.
+ rtc::scoped_refptr<I420Buffer> scaled_buffer =
+ I420Buffer::Create(width_, height_);
+ scaled_buffer->ScaleFrom(*buffer->ToI420());
+ buffer = scaled_buffer;
+ }
+ return VideoFrameData(buffer, next_frame_->update_rect());
+}
+
+void IvfVideoFrameGenerator::ChangeResolution(size_t width, size_t height) {
+ MutexLock lock(&lock_);
+ width_ = width;
+ height_ = height;
+}
+
+int32_t IvfVideoFrameGenerator::DecodedCallback::Decoded(
+ VideoFrame& decoded_image) {
+ Decoded(decoded_image, 0, 0);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+int32_t IvfVideoFrameGenerator::DecodedCallback::Decoded(
+ VideoFrame& decoded_image,
+ int64_t decode_time_ms) {
+ Decoded(decoded_image, decode_time_ms, 0);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+void IvfVideoFrameGenerator::DecodedCallback::Decoded(
+ VideoFrame& decoded_image,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) {
+ reader_->OnFrameDecoded(decoded_image);
+}
+
+void IvfVideoFrameGenerator::OnFrameDecoded(const VideoFrame& decoded_frame) {
+ MutexLock lock(&frame_decode_lock_);
+ next_frame_ = decoded_frame;
+ next_frame_decoded_.Set();
+}
+
+std::unique_ptr<VideoDecoder> IvfVideoFrameGenerator::CreateVideoDecoder(
+ VideoCodecType codec_type) {
+ if (codec_type == VideoCodecType::kVideoCodecVP8) {
+ return VP8Decoder::Create();
+ }
+ if (codec_type == VideoCodecType::kVideoCodecVP9) {
+ return VP9Decoder::Create();
+ }
+ if (codec_type == VideoCodecType::kVideoCodecH264) {
+ return H264Decoder::Create();
+ }
+ return nullptr;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.h b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.h
new file mode 100644
index 0000000000..4b6d116383
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_IVF_VIDEO_FRAME_GENERATOR_H_
+#define TEST_TESTSUPPORT_IVF_VIDEO_FRAME_GENERATOR_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "api/test/frame_generator_interface.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/utility/ivf_file_reader.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+
+namespace webrtc {
+namespace test {
+
+// All methods except constructor must be used from the same thread.
+class IvfVideoFrameGenerator : public FrameGeneratorInterface {
+ public:
+ explicit IvfVideoFrameGenerator(const std::string& file_name);
+ ~IvfVideoFrameGenerator() override;
+
+ VideoFrameData NextFrame() override;
+ void ChangeResolution(size_t width, size_t height) override;
+
+ private:
+ class DecodedCallback : public DecodedImageCallback {
+ public:
+ explicit DecodedCallback(IvfVideoFrameGenerator* reader)
+ : reader_(reader) {}
+
+ int32_t Decoded(VideoFrame& decoded_image) override;
+ int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override;
+ void Decoded(VideoFrame& decoded_image,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) override;
+
+ private:
+ IvfVideoFrameGenerator* const reader_;
+ };
+
+ void OnFrameDecoded(const VideoFrame& decoded_frame);
+ static std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ VideoCodecType codec_type);
+
+ DecodedCallback callback_;
+ std::unique_ptr<IvfFileReader> file_reader_;
+ std::unique_ptr<VideoDecoder> video_decoder_;
+
+ size_t width_;
+ size_t height_;
+
+  // This lock is used to ensure that all API methods are called
+  // sequentially. It is required because we need to ensure that the generator
+  // isn't destroyed while it is reading the next frame on another thread,
+  // as that would cause a SIGSEGV when the decoder callback is invoked.
+  //
+  // FrameGenerator is injected into PeerConnection via some scoped_ref object
+  // and it can happen that the last pointer is destroyed on a different
+  // thread than the one from which frames were read.
+ Mutex lock_;
+  // This lock is used to sync sending a frame to and receiving it from the
+  // decoder. We can't reuse `lock_` because then the generator could be
+  // destroyed between a frame being sent and the decoder callback firing.
+ Mutex frame_decode_lock_;
+
+ rtc::Event next_frame_decoded_;
+ absl::optional<VideoFrame> next_frame_ RTC_GUARDED_BY(frame_decode_lock_);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_IVF_VIDEO_FRAME_GENERATOR_H_
diff --git a/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator_unittest.cc b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator_unittest.cc
new file mode 100644
index 0000000000..dd60f3d3fc
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator_unittest.cc
@@ -0,0 +1,212 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/ivf_video_frame_generator.h"
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/test/create_frame_generator.h"
+#include "api/units/time_delta.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_codec_type.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/utility/ivf_file_writer.h"
+#include "rtc_base/event.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/video_codec_settings.h"
+
+#if defined(WEBRTC_USE_H264)
+#include "modules/video_coding/codecs/h264/include/h264.h"
+#include "rtc_base/synchronization/mutex.h"
+
+#endif
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr int kWidth = 320;
+constexpr int kHeight = 240;
+constexpr int kVideoFramesCount = 30;
+constexpr int kMaxFramerate = 30;
+constexpr TimeDelta kMaxFrameEncodeWaitTimeout = TimeDelta::Seconds(2);
+static const VideoEncoder::Capabilities kCapabilities(false);
+
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM64)
+constexpr double kExpectedMinPsnr = 35;
+#else
+constexpr double kExpectedMinPsnr = 39;
+#endif
+
+class IvfFileWriterEncodedCallback : public EncodedImageCallback {
+ public:
+ IvfFileWriterEncodedCallback(const std::string& file_name,
+ VideoCodecType video_codec_type,
+ int expected_frames_count)
+ : file_writer_(
+ IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(file_name), 0)),
+ video_codec_type_(video_codec_type),
+ expected_frames_count_(expected_frames_count) {
+ EXPECT_TRUE(file_writer_.get());
+ }
+ ~IvfFileWriterEncodedCallback() { EXPECT_TRUE(file_writer_->Close()); }
+
+ Result OnEncodedImage(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) override {
+ EXPECT_TRUE(file_writer_->WriteFrame(encoded_image, video_codec_type_));
+
+ MutexLock lock(&lock_);
+ received_frames_count_++;
+ RTC_CHECK_LE(received_frames_count_, expected_frames_count_);
+ if (received_frames_count_ == expected_frames_count_) {
+ expected_frames_count_received_.Set();
+ }
+ return Result(Result::Error::OK);
+ }
+
+ bool WaitForExpectedFramesReceived(TimeDelta timeout) {
+ return expected_frames_count_received_.Wait(timeout);
+ }
+
+ private:
+ std::unique_ptr<IvfFileWriter> file_writer_;
+ const VideoCodecType video_codec_type_;
+ const int expected_frames_count_;
+
+ Mutex lock_;
+ int received_frames_count_ RTC_GUARDED_BY(lock_) = 0;
+ rtc::Event expected_frames_count_received_;
+};
+
+class IvfVideoFrameGeneratorTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ file_name_ =
+ webrtc::test::TempFilename(webrtc::test::OutputPath(), "test_file.ivf");
+ }
+ void TearDown() override { webrtc::test::RemoveFile(file_name_); }
+
+ VideoFrame BuildFrame(FrameGeneratorInterface::VideoFrameData frame_data) {
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(frame_data.buffer)
+ .set_update_rect(frame_data.update_rect)
+ .build();
+ }
+
+ void CreateTestVideoFile(VideoCodecType video_codec_type,
+ std::unique_ptr<VideoEncoder> video_encoder) {
+ std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
+ test::CreateSquareFrameGenerator(
+ kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
+ absl::nullopt);
+
+ VideoCodec codec_settings;
+ webrtc::test::CodecSettings(video_codec_type, &codec_settings);
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.maxFramerate = kMaxFramerate;
+ const uint32_t kBitrateBps = 500000;
+ VideoBitrateAllocation bitrate_allocation;
+ bitrate_allocation.SetBitrate(0, 0, kBitrateBps);
+
+ IvfFileWriterEncodedCallback ivf_writer_callback(
+ file_name_, video_codec_type, kVideoFramesCount);
+
+ video_encoder->RegisterEncodeCompleteCallback(&ivf_writer_callback);
+ video_encoder->SetRates(VideoEncoder::RateControlParameters(
+ bitrate_allocation, static_cast<double>(codec_settings.maxFramerate)));
+ ASSERT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ video_encoder->InitEncode(
+ &codec_settings,
+ VideoEncoder::Settings(kCapabilities, /*number_of_cores=*/1,
+ /*max_payload_size=*/0)));
+
+ uint32_t last_frame_timestamp = 0;
+
+ for (int i = 0; i < kVideoFramesCount; ++i) {
+ VideoFrame frame = BuildFrame(frame_generator->NextFrame());
+ const uint32_t timestamp =
+ last_frame_timestamp +
+ kVideoPayloadTypeFrequency / codec_settings.maxFramerate;
+ frame.set_timestamp(timestamp);
+
+ last_frame_timestamp = timestamp;
+
+ ASSERT_EQ(WEBRTC_VIDEO_CODEC_OK, video_encoder->Encode(frame, nullptr));
+ video_frames_.push_back(frame);
+ }
+
+ ASSERT_TRUE(ivf_writer_callback.WaitForExpectedFramesReceived(
+ kMaxFrameEncodeWaitTimeout));
+ }
+
+ std::string file_name_;
+ std::vector<VideoFrame> video_frames_;
+};
+
+} // namespace
+
+TEST_F(IvfVideoFrameGeneratorTest, Vp8) {
+ CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, VP8Encoder::Create());
+ IvfVideoFrameGenerator generator(file_name_);
+ for (size_t i = 0; i < video_frames_.size(); ++i) {
+ auto& expected_frame = video_frames_[i];
+ VideoFrame actual_frame = BuildFrame(generator.NextFrame());
+ EXPECT_GT(I420PSNR(&expected_frame, &actual_frame), kExpectedMinPsnr);
+ }
+}
+
+TEST_F(IvfVideoFrameGeneratorTest, Vp8DoubleRead) {
+ CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, VP8Encoder::Create());
+ IvfVideoFrameGenerator generator(file_name_);
+ for (size_t i = 0; i < video_frames_.size() * 2; ++i) {
+ auto& expected_frame = video_frames_[i % video_frames_.size()];
+ VideoFrame actual_frame = BuildFrame(generator.NextFrame());
+ EXPECT_GT(I420PSNR(&expected_frame, &actual_frame), kExpectedMinPsnr);
+ }
+}
+
+TEST_F(IvfVideoFrameGeneratorTest, Vp9) {
+ CreateTestVideoFile(VideoCodecType::kVideoCodecVP9, VP9Encoder::Create());
+ IvfVideoFrameGenerator generator(file_name_);
+ for (size_t i = 0; i < video_frames_.size(); ++i) {
+ auto& expected_frame = video_frames_[i];
+ VideoFrame actual_frame = BuildFrame(generator.NextFrame());
+ EXPECT_GT(I420PSNR(&expected_frame, &actual_frame), kExpectedMinPsnr);
+ }
+}
+
+#if defined(WEBRTC_USE_H264)
+TEST_F(IvfVideoFrameGeneratorTest, H264) {
+ CreateTestVideoFile(
+ VideoCodecType::kVideoCodecH264,
+ H264Encoder::Create(cricket::VideoCodec(cricket::kH264CodecName)));
+ IvfVideoFrameGenerator generator(file_name_);
+ for (size_t i = 0; i < video_frames_.size(); ++i) {
+ auto& expected_frame = video_frames_[i];
+ VideoFrame actual_frame = BuildFrame(generator.NextFrame());
+ EXPECT_GT(I420PSNR(&expected_frame, &actual_frame), kExpectedMinPsnr);
+ }
+}
+#endif
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/jpeg_frame_writer.cc b/third_party/libwebrtc/test/testsupport/jpeg_frame_writer.cc
new file mode 100644
index 0000000000..8bf1ee4630
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/jpeg_frame_writer.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "test/testsupport/frame_writer.h"
+
+extern "C" {
+#if defined(USE_SYSTEM_LIBJPEG)
+#include <jpeglib.h>
+#else
+// Include directory supplied by gn
+#include "jpeglib.h" // NOLINT
+#endif
+}
+
+namespace webrtc {
+namespace test {
+
+JpegFrameWriter::JpegFrameWriter(const std::string& output_filename)
+ : frame_written_(false),
+ output_filename_(output_filename),
+ output_file_(nullptr) {}
+
+bool JpegFrameWriter::WriteFrame(const VideoFrame& input_frame, int quality) {
+ if (frame_written_) {
+ RTC_LOG(LS_ERROR) << "Only a single frame can be saved to Jpeg.";
+ return false;
+ }
+ const int kColorPlanes = 3; // R, G and B.
+ size_t rgb_len = input_frame.height() * input_frame.width() * kColorPlanes;
+ std::unique_ptr<uint8_t[]> rgb_buf(new uint8_t[rgb_len]);
+
+ // kRGB24 actually corresponds to FourCC 24BG which is 24-bit BGR.
+ if (ConvertFromI420(input_frame, VideoType::kRGB24, 0, rgb_buf.get()) < 0) {
+ RTC_LOG(LS_ERROR) << "Could not convert input frame to RGB.";
+ return false;
+ }
+ output_file_ = fopen(output_filename_.c_str(), "wb");
+ if (!output_file_) {
+ RTC_LOG(LS_ERROR) << "Couldn't open file to write jpeg frame to:"
+ << output_filename_;
+ return false;
+ }
+
+ // Invoking LIBJPEG
+ struct jpeg_compress_struct cinfo;
+ struct jpeg_error_mgr jerr;
+ JSAMPROW row_pointer[1];
+ cinfo.err = jpeg_std_error(&jerr);
+ jpeg_create_compress(&cinfo);
+
+ jpeg_stdio_dest(&cinfo, output_file_);
+
+ cinfo.image_width = input_frame.width();
+ cinfo.image_height = input_frame.height();
+ cinfo.input_components = kColorPlanes;
+ cinfo.in_color_space = JCS_EXT_BGR;
+ jpeg_set_defaults(&cinfo);
+ jpeg_set_quality(&cinfo, quality, TRUE);
+
+ jpeg_start_compress(&cinfo, TRUE);
+ int row_stride = input_frame.width() * kColorPlanes;
+ while (cinfo.next_scanline < cinfo.image_height) {
+ row_pointer[0] = &rgb_buf.get()[cinfo.next_scanline * row_stride];
+ jpeg_write_scanlines(&cinfo, row_pointer, 1);
+ }
+
+ jpeg_finish_compress(&cinfo);
+ jpeg_destroy_compress(&cinfo);
+ fclose(output_file_);
+
+ frame_written_ = true;
+ return true;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/jpeg_frame_writer_ios.cc b/third_party/libwebrtc/test/testsupport/jpeg_frame_writer_ios.cc
new file mode 100644
index 0000000000..e72fea102f
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/jpeg_frame_writer_ios.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
// iOS stub: libjpeg is unavailable, so the writer does nothing and the
// output filename is intentionally ignored.
JpegFrameWriter::JpegFrameWriter(const std::string& /*output_filename*/) {}
+
// iOS stub: logs a warning and reports success without writing anything, so
// callers do not treat the missing libjpeg support as a test failure.
bool JpegFrameWriter::WriteFrame(const VideoFrame& /*input_frame*/,
                                 int /*quality*/) {
  RTC_LOG(LS_WARNING)
      << "Libjpeg isn't available on IOS. Jpeg frame writer is not "
         "supported. No frame will be saved.";
  // Don't fail.
  return true;
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/mac_file_utils.h b/third_party/libwebrtc/test/testsupport/mac_file_utils.h
new file mode 100644
index 0000000000..c6cbdc580d
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/mac_file_utils.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_MAC_FILE_UTILS_H_
+#define TEST_TESTSUPPORT_MAC_FILE_UTILS_H_
+
+#include <string>
+
+namespace webrtc {
+namespace test {
+
+void GetNSExecutablePath(std::string* path);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_MAC_FILE_UTILS_H_
diff --git a/third_party/libwebrtc/test/testsupport/mac_file_utils.mm b/third_party/libwebrtc/test/testsupport/mac_file_utils.mm
new file mode 100644
index 0000000000..270ecbc9a5
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/mac_file_utils.mm
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#include <dlfcn.h>
+#include <mach-o/dyld.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+void GetNSExecutablePath(std::string* path) {
+ RTC_DCHECK(path);
+ // Executable path can have relative references ("..") depending on
+ // how the app was launched.
+ uint32_t executable_length = 0;
+ _NSGetExecutablePath(NULL, &executable_length);
+ RTC_DCHECK_GT(executable_length, 1u);
+ char executable_path[PATH_MAX + 1];
+ int rv = _NSGetExecutablePath(executable_path, &executable_length);
+ RTC_DCHECK_EQ(rv, 0);
+
+ char full_path[PATH_MAX];
+ if (realpath(executable_path, full_path) == nullptr) {
+ *path = "";
+ return;
+ }
+
+ *path = full_path;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h b/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h
new file mode 100644
index 0000000000..f68bbf8368
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/mock/mock_frame_reader.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
+#define TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
+
+#include "api/video/i420_buffer.h"
+#include "test/gmock.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+
// GMock test double for FrameReader: lets video-codec tests stub out frame
// file reading without touching the filesystem.
class MockFrameReader : public FrameReader {
 public:
  MOCK_METHOD(rtc::scoped_refptr<I420Buffer>, PullFrame, (), (override));
  MOCK_METHOD(rtc::scoped_refptr<I420Buffer>, PullFrame, (int*), (override));
  MOCK_METHOD(rtc::scoped_refptr<I420Buffer>,
              PullFrame,
              (int*, Resolution, Ratio),
              (override));
  MOCK_METHOD(rtc::scoped_refptr<I420Buffer>, ReadFrame, (int), (override));
  MOCK_METHOD(rtc::scoped_refptr<I420Buffer>,
              ReadFrame,
              (int, Resolution),
              (override));
  MOCK_METHOD(int, num_frames, (), (const override));
};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_MOCK_MOCK_FRAME_READER_H_
diff --git a/third_party/libwebrtc/test/testsupport/perf_test.cc b/third_party/libwebrtc/test/testsupport/perf_test.cc
new file mode 100644
index 0000000000..bbea5f841a
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/perf_test.h"
+
#include <stdio.h>

#include <algorithm>
#include <cmath>
#include <fstream>
#include <set>
#include <sstream>
#include <vector>

#include "absl/strings/string_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/synchronization/mutex.h"
#include "test/testsupport/file_utils.h"
#include "test/testsupport/perf_test_histogram_writer.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+std::string UnitWithDirection(
+ absl::string_view units,
+ webrtc::test::ImproveDirection improve_direction) {
+ switch (improve_direction) {
+ case webrtc::test::ImproveDirection::kNone:
+ return std::string(units);
+ case webrtc::test::ImproveDirection::kSmallerIsBetter:
+ return std::string(units) + "_smallerIsBetter";
+ case webrtc::test::ImproveDirection::kBiggerIsBetter:
+ return std::string(units) + "_biggerIsBetter";
+ }
+}
+
+std::vector<SamplesStatsCounter::StatsSample> GetSortedSamples(
+ const SamplesStatsCounter& counter) {
+ rtc::ArrayView<const SamplesStatsCounter::StatsSample> view =
+ counter.GetTimedSamples();
+ std::vector<SamplesStatsCounter::StatsSample> out(view.begin(), view.end());
+ std::stable_sort(out.begin(), out.end(),
+ [](const SamplesStatsCounter::StatsSample& a,
+ const SamplesStatsCounter::StatsSample& b) {
+ return a.time < b.time;
+ });
+ return out;
+}
+
// Streams `values` into `ostream` as a comma-separated list — no brackets,
// no trailing separator.
template <typename Container>
void OutputListToStream(std::ostream* ostream, const Container& values) {
  bool first = true;
  for (const auto& value : values) {
    if (!first) {
      (*ostream) << ",";
    }
    (*ostream) << value;
    first = false;
  }
}
+
// One counter queued for PLOTTABLE_DATA output: its dashboard location
// (graph/trace), the recorded samples, and the unit they are measured in.
struct PlottableCounter {
  std::string graph_name;
  std::string trace_name;
  webrtc::SamplesStatsCounter counter;
  std::string units;
};
+
// Accumulates counters and, on demand, dumps each as a single
// "PLOTTABLE_DATA: <json>" line to the configured FILE* (stdout by default)
// for offline plotting. Thread-safe: all state is guarded by `mutex_`.
class PlottableCounterPrinter {
 public:
  PlottableCounterPrinter() : output_(stdout) {}

  // Redirects subsequent Print() output to `output`.
  void SetOutput(FILE* output) {
    MutexLock lock(&mutex_);
    output_ = output;
  }

  // Queues a counter to be emitted by a later Print() call.
  void AddCounter(absl::string_view graph_name,
                  absl::string_view trace_name,
                  const webrtc::SamplesStatsCounter& counter,
                  absl::string_view units) {
    MutexLock lock(&mutex_);
    plottable_counters_.push_back({std::string(graph_name),
                                   std::string(trace_name), counter,
                                   std::string(units)});
  }

  // Prints every queued counter whose graph name appears in
  // `desired_graphs_raw`; an empty filter means "print everything".
  void Print(const std::vector<std::string>& desired_graphs_raw) const {
    std::set<std::string> desired_graphs(desired_graphs_raw.begin(),
                                         desired_graphs_raw.end());
    MutexLock lock(&mutex_);
    for (auto& counter : plottable_counters_) {
      if (!desired_graphs.empty()) {
        auto it = desired_graphs.find(counter.graph_name);
        if (it == desired_graphs.end()) {
          continue;
        }
      }

      // JSON is built by hand; each sample keeps its capture time in
      // microseconds so post-processing can plot values over time.
      std::ostringstream value_stream;
      value_stream.precision(8);
      value_stream << R"({"graph_name":")" << counter.graph_name << R"(",)";
      value_stream << R"("trace_name":")" << counter.trace_name << R"(",)";
      value_stream << R"("units":")" << counter.units << R"(",)";
      // Mean/std are only emitted when there is at least one sample.
      if (!counter.counter.IsEmpty()) {
        value_stream << R"("mean":)" << counter.counter.GetAverage() << ',';
        value_stream << R"("std":)" << counter.counter.GetStandardDeviation()
                     << ',';
      }
      value_stream << R"("samples":[)";
      const char* sep = "";
      for (const auto& sample : counter.counter.GetTimedSamples()) {
        value_stream << sep << R"({"time":)" << sample.time.us() << ','
                     << R"("value":)" << sample.value << '}';
        sep = ",";
      }
      value_stream << "]}";

      fprintf(output_, "PLOTTABLE_DATA: %s\n", value_stream.str().c_str());
    }
  }

 private:
  // mutable so the const Print() can still lock.
  mutable Mutex mutex_;
  std::vector<PlottableCounter> plottable_counters_ RTC_GUARDED_BY(&mutex_);
  FILE* output_ RTC_GUARDED_BY(&mutex_);
};
+
+PlottableCounterPrinter& GetPlottableCounterPrinter() {
+ static PlottableCounterPrinter* printer_ = new PlottableCounterPrinter();
+ return *printer_;
+}
+
+class ResultsLinePrinter {
+ public:
+ ResultsLinePrinter() : output_(stdout) {}
+
+ void SetOutput(FILE* output) {
+ MutexLock lock(&mutex_);
+ output_ = output;
+ }
+
+ void PrintResult(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const double value,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction) {
+ std::ostringstream value_stream;
+ value_stream.precision(8);
+ value_stream << value;
+
+ PrintResultImpl(graph_name, trace_name, value_stream.str(), std::string(),
+ std::string(), UnitWithDirection(units, improve_direction),
+ important);
+ }
+
+ void PrintResultMeanAndError(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const double mean,
+ const double error,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction) {
+ std::ostringstream value_stream;
+ value_stream.precision(8);
+ value_stream << mean << ',' << error;
+ PrintResultImpl(graph_name, trace_name, value_stream.str(), "{", "}",
+ UnitWithDirection(units, improve_direction), important);
+ }
+
+ void PrintResultList(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const rtc::ArrayView<const double> values,
+ absl::string_view units,
+ const bool important,
+ webrtc::test::ImproveDirection improve_direction) {
+ std::ostringstream value_stream;
+ value_stream.precision(8);
+ OutputListToStream(&value_stream, values);
+ PrintResultImpl(graph_name, trace_name, value_stream.str(), "[", "]", units,
+ important);
+ }
+
+ private:
+ void PrintResultImpl(absl::string_view graph_name,
+ absl::string_view trace_name,
+ absl::string_view values,
+ absl::string_view prefix,
+ absl::string_view suffix,
+ absl::string_view units,
+ bool important) {
+ MutexLock lock(&mutex_);
+ rtc::StringBuilder message;
+ message << (important ? "*" : "") << "RESULT " << graph_name << ": "
+ << trace_name << "= " << prefix << values << suffix << " " << units;
+ // <*>RESULT <graph_name>: <trace_name>= <value> <units>
+ // <*>RESULT <graph_name>: <trace_name>= {<mean>, <std deviation>} <units>
+ // <*>RESULT <graph_name>: <trace_name>= [<value>,value,value,...,] <units>
+ fprintf(output_, "%s\n", message.str().c_str());
+ }
+
+ Mutex mutex_;
+ FILE* output_ RTC_GUARDED_BY(&mutex_);
+};
+
+ResultsLinePrinter& GetResultsLinePrinter() {
+ static ResultsLinePrinter* const printer_ = new ResultsLinePrinter();
+ return *printer_;
+}
+
+PerfTestResultWriter& GetPerfWriter() {
+ static PerfTestResultWriter* writer = CreateHistogramWriter();
+ return *writer;
+}
+
+} // namespace
+
// Drops every result accumulated in the histogram writer (tests only).
void ClearPerfResults() {
  GetPerfWriter().ClearResults();
}
+
// Redirects the human-readable RESULT lines and PLOTTABLE_DATA output to
// `output`. Does not affect what WritePerfResults() serializes.
void SetPerfResultsOutput(FILE* output) {
  GetPlottableCounterPrinter().SetOutput(output);
  GetResultsLinePrinter().SetOutput(output);
}
+
// Returns all accumulated results serialized as a binary HistogramSet proto.
std::string GetPerfResults() {
  return GetPerfWriter().Serialize();
}
+
// Dumps the queued plottable counters to the configured output;
// `desired_graphs` filters by graph name, empty means "print all".
void PrintPlottableResults(const std::vector<std::string>& desired_graphs) {
  GetPlottableCounterPrinter().Print(desired_graphs);
}
+
+bool WritePerfResults(const std::string& output_path) {
+ std::string results = GetPerfResults();
+ CreateDir(DirName(output_path));
+ FILE* output = fopen(output_path.c_str(), "wb");
+ if (output == NULL) {
+ printf("Failed to write to %s.\n", output_path.c_str());
+ return false;
+ }
+ size_t written =
+ fwrite(results.c_str(), sizeof(char), results.size(), output);
+ fclose(output);
+
+ if (written != results.size()) {
+ long expected = results.size();
+ printf("Wrote %zu, tried to write %lu\n", written, expected);
+ return false;
+ }
+
+ return true;
+}
+
// Logs a single scalar result under graph `measurement`+`modifier`, story
// `trace`: uploads it to the histogram writer and prints a RESULT line.
// Crashes (RTC_CHECK) on NaN/inf values, which would poison the dashboard.
void PrintResult(absl::string_view measurement,
                 absl::string_view modifier,
                 absl::string_view trace,
                 const double value,
                 absl::string_view units,
                 bool important,
                 ImproveDirection improve_direction) {
  rtc::StringBuilder graph_name;
  graph_name << measurement << modifier;
  RTC_CHECK(std::isfinite(value))
      << "Expected finite value for graph " << graph_name.str()
      << ", trace name " << trace << ", units " << units << ", got " << value;
  GetPerfWriter().LogResult(graph_name.str(), trace, value, units, important,
                            improve_direction);
  GetResultsLinePrinter().PrintResult(graph_name.str(), trace, value, units,
                                      important, improve_direction);
}
+
// Logs a stats-counter result: queues it with the plottable-counter printer,
// uploads each individual sample (time-ordered) to the histogram writer, and
// prints a human-readable {mean, stddev} RESULT line.
void PrintResult(absl::string_view measurement,
                 absl::string_view modifier,
                 absl::string_view trace,
                 const SamplesStatsCounter& counter,
                 absl::string_view units,
                 const bool important,
                 ImproveDirection improve_direction) {
  rtc::StringBuilder graph_name;
  graph_name << measurement << modifier;
  GetPlottableCounterPrinter().AddCounter(graph_name.str(), trace, counter,
                                          units);

  // An empty counter reports mean/error of 0 rather than NaN.
  double mean = counter.IsEmpty() ? 0 : counter.GetAverage();
  double error = counter.IsEmpty() ? 0 : counter.GetStandardDeviation();

  // Export samples in capture-time order.
  std::vector<SamplesStatsCounter::StatsSample> timed_samples =
      GetSortedSamples(counter);
  std::vector<double> samples(timed_samples.size());
  for (size_t i = 0; i < timed_samples.size(); ++i) {
    samples[i] = timed_samples[i].value;
  }
  // If we have an empty counter, default it to 0.
  if (samples.empty()) {
    samples.push_back(0);
  }

  GetPerfWriter().LogResultList(graph_name.str(), trace, samples, units,
                                important, improve_direction);
  GetResultsLinePrinter().PrintResultMeanAndError(graph_name.str(), trace, mean,
                                                  error, units, important,
                                                  improve_direction);
}
+
+void PrintResultMeanAndError(absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view trace,
+ const double mean,
+ const double error,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction) {
+ RTC_CHECK(std::isfinite(mean));
+ RTC_CHECK(std::isfinite(error));
+
+ rtc::StringBuilder graph_name;
+ graph_name << measurement << modifier;
+ GetPerfWriter().LogResultMeanAndError(graph_name.str(), trace, mean, error,
+ units, important, improve_direction);
+ GetResultsLinePrinter().PrintResultMeanAndError(graph_name.str(), trace, mean,
+ error, units, important,
+ improve_direction);
+}
+
+void PrintResultList(absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view trace,
+ const rtc::ArrayView<const double> values,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction) {
+ for (double v : values) {
+ RTC_CHECK(std::isfinite(v));
+ }
+
+ rtc::StringBuilder graph_name;
+ graph_name << measurement << modifier;
+ GetPerfWriter().LogResultList(graph_name.str(), trace, values, units,
+ important, improve_direction);
+ GetResultsLinePrinter().PrintResultList(graph_name.str(), trace, values,
+ units, important, improve_direction);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/perf_test.h b/third_party/libwebrtc/test/testsupport/perf_test.h
new file mode 100644
index 0000000000..732fff7d14
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_PERF_TEST_H_
+#define TEST_TESTSUPPORT_PERF_TEST_H_
+
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/numerics/samples_stats_counter.h"
+
+namespace webrtc {
+namespace test {
+
// Direction in which a metric improves; the perf dashboard uses this to
// decide whether a change in the metric is a regression or an improvement.
enum class ImproveDirection {
  // Direction is undefined.
  kNone,
  // Smaller value is better.
  kSmallerIsBetter,
  // Bigger value is better.
  kBiggerIsBetter,
};
+
+// Prints a performance test result.
+//
+// For example,
+// PrintResult("ramp_up_time_", "turn_over_tcp",
+// "bwe_15s", 1234.2, "ms", false);
+//
+// will show up in the http://chromeperf.appspot.com under
+//
+// (test binary name) > (bot) > ramp_up_time_turn_over_tcp > bwe_15s.
+//
+// The `measurement` + `modifier` is what we're measuring. `user_story` is the
+// scenario we're testing under.
+//
+// The binary this runs in must be hooked up as a perf test in the WebRTC
+// recipes for this to actually be uploaded to chromeperf.appspot.com.
+void PrintResult(absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view user_story,
+ double value,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction = ImproveDirection::kNone);
+
+// Like PrintResult(), but prints a (mean, standard deviation) result pair.
+// The |<values>| should be two comma-separated numbers, the mean and
+// standard deviation (or other error metric) of the measurement.
+// DEPRECATED: soon unsupported.
+void PrintResultMeanAndError(
+ absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view user_story,
+ double mean,
+ double error,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction = ImproveDirection::kNone);
+
+// Like PrintResult(), but prints an entire list of results. The `values`
+// will generally be a list of comma-separated numbers. A typical
+// post-processing step might produce plots of their mean and standard
+// deviation.
+void PrintResultList(
+ absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view user_story,
+ rtc::ArrayView<const double> values,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction = ImproveDirection::kNone);
+
+// Like PrintResult(), but prints a (mean, standard deviation) from stats
+// counter. Also add specified metric to the plotable metrics output.
+void PrintResult(absl::string_view measurement,
+ absl::string_view modifier,
+ absl::string_view user_story,
+ const SamplesStatsCounter& counter,
+ absl::string_view units,
+ bool important,
+ ImproveDirection improve_direction = ImproveDirection::kNone);
+
+// Returns a string-encoded proto as described in
+// tracing/tracing/proto/histogram.proto in
+// https://github.com/catapult-project/catapult/blob/master/.
+// If you want to print the proto in human readable format, use
+// tracing/bin/proto2json from third_party/catapult in your WebRTC checkout.
+std::string GetPerfResults();
+
+// Print into stdout plottable metrics for further post processing.
+// `desired_graphs` - list of metrics, that should be plotted. If empty - all
+// available metrics will be plotted. If some of `desired_graphs` are missing
+// they will be skipped.
+void PrintPlottableResults(const std::vector<std::string>& desired_graphs);
+
+// Call GetPerfResults() and write its output to a file. Returns false if we
+// failed to write to the file. If you want to print the proto in human readable
+// format, use tracing/bin/proto2json from third_party/catapult in your WebRTC
+// checkout.
+bool WritePerfResults(const std::string& output_path);
+
+// By default, human-readable perf results are printed to stdout. Set the FILE*
+// to where they should be printing instead. These results are not used to
+// upload to the dashboard, however - this is only through WritePerfResults.
+void SetPerfResultsOutput(FILE* output);
+
+// Only for use by tests.
+void ClearPerfResults();
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_PERF_TEST_H_
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.cc b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.cc
new file mode 100644
index 0000000000..93924ba16c
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.cc
@@ -0,0 +1,201 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/perf_test_histogram_writer.h"
+
+#include <stdlib.h>
+
+#include <map>
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "api/numerics/samples_stats_counter.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "third_party/catapult/tracing/tracing/value/diagnostics/reserved_infos.h"
+#include "third_party/catapult/tracing/tracing/value/histogram.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+namespace proto = catapult::tracing::tracing::proto;
+
// Wraps `str` in double quotes so it can be embedded as a JSON string value.
// Takes a const reference instead of a by-value copy. Performs no escaping,
// so callers must not pass strings containing '"' or '\'.
std::string AsJsonString(const std::string& str) {
  return "\"" + str + "\"";
}
+
+class PerfTestHistogramWriter : public PerfTestResultWriter {
+ public:
+ PerfTestHistogramWriter() : mutex_() {}
+ void ClearResults() override {
+ MutexLock lock(&mutex_);
+ histograms_.clear();
+ }
+
+ void LogResult(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const double value,
+ absl::string_view units,
+ const bool important,
+ ImproveDirection improve_direction) override {
+ (void)important;
+ AddSample(graph_name, trace_name, value, units, improve_direction);
+ }
+ void LogResultMeanAndError(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const double mean,
+ const double error,
+ absl::string_view units,
+ const bool important,
+ ImproveDirection improve_direction) override {
+ RTC_LOG(LS_WARNING) << "Discarding stddev, not supported by histograms";
+ (void)error;
+ (void)important;
+
+ AddSample(graph_name, trace_name, mean, units, improve_direction);
+ }
+ void LogResultList(absl::string_view graph_name,
+ absl::string_view trace_name,
+ const rtc::ArrayView<const double> values,
+ absl::string_view units,
+ const bool important,
+ ImproveDirection improve_direction) override {
+ (void)important;
+ for (double value : values) {
+ AddSample(graph_name, trace_name, value, units, improve_direction);
+ }
+ }
+ std::string Serialize() const override {
+ proto::HistogramSet histogram_set;
+
+ MutexLock lock(&mutex_);
+ for (const auto& histogram : histograms_) {
+ std::unique_ptr<proto::Histogram> proto = histogram.second->toProto();
+ histogram_set.mutable_histograms()->AddAllocated(proto.release());
+ }
+
+ std::string output;
+ bool ok = histogram_set.SerializeToString(&output);
+ RTC_DCHECK(ok) << "Failed to serialize histogram set to string";
+ return output;
+ }
+
+ private:
+ void AddSample(absl::string_view original_graph_name,
+ absl::string_view trace_name,
+ const double value,
+ absl::string_view units,
+ ImproveDirection improve_direction) {
+ // WebRTC annotates the units into the metric name when they are not
+ // supported by the Histogram API.
+ std::string graph_name(original_graph_name);
+ if (units == "dB") {
+ graph_name += "_dB";
+ } else if (units == "fps") {
+ graph_name += "_fps";
+ } else if (units == "%") {
+ graph_name += "_%";
+ }
+
+ // Lookup on graph name + trace name (or measurement + story in catapult
+ // parlance). There should be several histograms with the same measurement
+ // if they're for different stories.
+ rtc::StringBuilder measurement_and_story;
+ measurement_and_story << graph_name << trace_name;
+ MutexLock lock(&mutex_);
+ if (histograms_.count(measurement_and_story.str()) == 0) {
+ proto::UnitAndDirection unit = ParseUnit(units, improve_direction);
+ std::unique_ptr<catapult::HistogramBuilder> builder =
+ std::make_unique<catapult::HistogramBuilder>(graph_name, unit);
+
+ // Set all summary options as false - we don't want to generate
+ // metric_std, metric_count, and so on for all metrics.
+ builder->SetSummaryOptions(proto::SummaryOptions());
+ histograms_[measurement_and_story.str()] = std::move(builder);
+
+ proto::Diagnostic stories;
+ proto::GenericSet* generic_set = stories.mutable_generic_set();
+ generic_set->add_values(AsJsonString(std::string(trace_name)));
+ histograms_[measurement_and_story.str()]->AddDiagnostic(
+ catapult::kStoriesDiagnostic, stories);
+ }
+
+ if (units == "bps") {
+ // Bps has been interpreted as bits per second in WebRTC tests.
+ histograms_[measurement_and_story.str()]->AddSample(value / 8);
+ } else {
+ histograms_[measurement_and_story.str()]->AddSample(value);
+ }
+ }
+
+ proto::UnitAndDirection ParseUnit(absl::string_view units,
+ ImproveDirection improve_direction) {
+ RTC_DCHECK(units.find('_') == std::string::npos)
+ << "The unit_bigger|smallerIsBetter syntax isn't supported in WebRTC, "
+ "use the enum instead.";
+
+ proto::UnitAndDirection result;
+ result.set_improvement_direction(ParseDirection(improve_direction));
+ if (units == "bps") {
+ result.set_unit(proto::BYTES_PER_SECOND);
+ } else if (units == "dB") {
+ result.set_unit(proto::UNITLESS);
+ } else if (units == "fps") {
+ result.set_unit(proto::HERTZ);
+ } else if (units == "frames") {
+ result.set_unit(proto::COUNT);
+ } else if (units == "ms") {
+ result.set_unit(proto::MS_BEST_FIT_FORMAT);
+ } else if (units == "%") {
+ result.set_unit(proto::UNITLESS);
+ } else {
+ proto::Unit unit = catapult::UnitFromJsonUnit(std::string(units));
+
+ // UnitFromJsonUnit returns UNITLESS if it doesn't recognize the unit.
+ if (unit == proto::UNITLESS && units != "unitless") {
+ RTC_LOG(LS_WARNING) << "Unit " << units << " is unsupported.";
+ }
+
+ result.set_unit(unit);
+ }
+ return result;
+ }
+
+ proto::ImprovementDirection ParseDirection(
+ ImproveDirection improve_direction) {
+ switch (improve_direction) {
+ case ImproveDirection::kNone:
+ return proto::NOT_SPECIFIED;
+ case ImproveDirection::kSmallerIsBetter:
+ return proto::SMALLER_IS_BETTER;
+ case ImproveDirection::kBiggerIsBetter:
+ return proto::BIGGER_IS_BETTER;
+ default:
+ RTC_DCHECK_NOTREACHED() << "Invalid enum value " << improve_direction;
+ }
+ }
+
+ private:
+ mutable Mutex mutex_;
+ std::map<std::string, std::unique_ptr<catapult::HistogramBuilder>> histograms_
+ RTC_GUARDED_BY(&mutex_);
+};
+
+} // namespace
+
// Factory for the protobuf-backed histogram writer; the caller takes
// ownership of the returned object.
PerfTestResultWriter* CreateHistogramWriter() {
  return new PerfTestHistogramWriter();
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.h b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.h
new file mode 100644
index 0000000000..244e69fc45
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_PERF_TEST_HISTOGRAM_WRITER_H_
+#define TEST_TESTSUPPORT_PERF_TEST_HISTOGRAM_WRITER_H_
+
+#include "test/testsupport/perf_test_result_writer.h"
+
+namespace webrtc {
+namespace test {
+
+PerfTestResultWriter* CreateHistogramWriter();
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_PERF_TEST_HISTOGRAM_WRITER_H_
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_no_protobuf.cc b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_no_protobuf.cc
new file mode 100644
index 0000000000..6bc810b94d
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_no_protobuf.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/perf_test_histogram_writer.h"
+
+namespace webrtc {
+namespace test {
+
// Stub used when rtc_enable_protobuf = false: histogram output requires
// protobuf serialization, so perf tests cannot run in this configuration
// and calling this is a programming error.
PerfTestResultWriter* CreateHistogramWriter() {
  RTC_DCHECK_NOTREACHED()
      << "Cannot run perf tests with rtc_enable_protobuf = false. "
         "Perf write results as protobufs.";
  return nullptr;
}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_unittest.cc b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_unittest.cc
new file mode 100644
index 0000000000..83025a7447
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_histogram_writer_unittest.cc
@@ -0,0 +1,216 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/perf_test_histogram_writer.h"
+
+#include <memory>
+#include <string>
+
+#include "test/gtest.h"
+#include "third_party/catapult/tracing/tracing/value/histogram.h"
+
+namespace webrtc {
+namespace test {
+
+namespace proto = catapult::tracing::tracing::proto;
+
+TEST(PerfHistogramWriterUnittest, TestSimpleHistogram) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("-", "-", 0, "ms", false, ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ EXPECT_TRUE(histogram_set.ParseFromString(writer->Serialize()))
+ << "Expected valid histogram set";
+
+ ASSERT_EQ(histogram_set.histograms_size(), 1);
+}
+
+TEST(PerfHistogramWriterUnittest, TestListOfValuesHistogram) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ std::vector<double> samples{0, 1, 2};
+ writer->LogResultList("-", "-", samples, "ms", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ EXPECT_TRUE(histogram_set.ParseFromString(writer->Serialize()))
+ << "Expected valid histogram set";
+
+ ASSERT_EQ(histogram_set.histograms_size(), 1);
+ ASSERT_EQ(histogram_set.histograms(0).sample_values_size(), 3);
+ EXPECT_EQ(histogram_set.histograms(0).sample_values(0), 0);
+ EXPECT_EQ(histogram_set.histograms(0).sample_values(1), 1);
+ EXPECT_EQ(histogram_set.histograms(0).sample_values(2), 2);
+}
+
+TEST(PerfHistogramWriterUnittest, WritesSamplesAndUserStory) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement", "user_story", 15e7, "Hz", false,
+ ImproveDirection::kBiggerIsBetter);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.name(), "measurement");
+
+ EXPECT_EQ(hist1.unit().unit(), proto::HERTZ);
+ EXPECT_EQ(hist1.unit().improvement_direction(), proto::BIGGER_IS_BETTER);
+
+ EXPECT_EQ(hist1.sample_values_size(), 1);
+ EXPECT_EQ(hist1.sample_values(0), 15e7);
+
+ EXPECT_EQ(hist1.diagnostics().diagnostic_map().count("stories"), 1u);
+ const proto::Diagnostic& stories =
+ hist1.diagnostics().diagnostic_map().at("stories");
+ ASSERT_EQ(stories.generic_set().values_size(), 1);
+ EXPECT_EQ(stories.generic_set().values(0), "\"user_story\"");
+}
+
+TEST(PerfHistogramWriterUnittest, WritesOneHistogramPerMeasurementAndStory) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement", "story1", 1, "ms", false,
+ ImproveDirection::kNone);
+ writer->LogResult("measurement", "story1", 2, "ms", false,
+ ImproveDirection::kNone);
+ writer->LogResult("measurement", "story2", 2, "ms", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ ASSERT_EQ(histogram_set.histograms_size(), 2);
+
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+ const proto::Histogram& hist2 = histogram_set.histograms(1);
+
+ EXPECT_EQ(hist1.name(), "measurement");
+ EXPECT_EQ(hist2.name(), "measurement");
+
+ const proto::Diagnostic& stories1 =
+ hist1.diagnostics().diagnostic_map().at("stories");
+ EXPECT_EQ(stories1.generic_set().values(0), "\"story1\"");
+ EXPECT_EQ(hist1.sample_values_size(), 2);
+
+ const proto::Diagnostic& stories2 =
+ hist2.diagnostics().diagnostic_map().at("stories");
+ EXPECT_EQ(stories2.generic_set().values(0), "\"story2\"");
+ EXPECT_EQ(hist2.sample_values_size(), 1);
+}
+
+TEST(PerfHistogramWriterUnittest, IgnoresError) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResultMeanAndError("-", "-", 17, 12345, "ms", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.running().mean(), 17);
+ EXPECT_EQ(hist1.running().variance(), 0) << "The error should be ignored.";
+}
+
+TEST(PerfHistogramWriterUnittest, WritesDecibelIntoMeasurementName) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement", "-", 0, "dB", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.unit().unit(), proto::UNITLESS)
+ << "dB should map to unitless";
+ EXPECT_EQ(hist1.name(), "measurement_dB") << "measurement should be renamed";
+}
+
+TEST(PerfHistogramWriterUnittest, WritesFpsIntoMeasurementName) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement", "-", 0, "fps", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.unit().unit(), proto::HERTZ) << "fps should map to hertz";
+ EXPECT_EQ(hist1.name(), "measurement_fps") << "measurement should be renamed";
+}
+
+TEST(PerfHistogramWriterUnittest, WritesPercentIntoMeasurementName) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement", "-", 0, "%", false, ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.unit().unit(), proto::UNITLESS)
+      << "percent should map to unitless";
+ EXPECT_EQ(hist1.name(), "measurement_%") << "measurement should be renamed";
+}
+
+TEST(PerfHistogramWriterUnittest, BitsPerSecondIsConvertedToBytes) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("-", "-", 1024, "bps", false, ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist1.sample_values(0), 128) << "1024 bits = 128 bytes";
+}
+
+TEST(PerfHistogramWriterUnittest, ParsesDirection) {
+ std::unique_ptr<PerfTestResultWriter> writer =
+ std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
+
+ writer->LogResult("measurement1", "-", 0, "bps", false,
+ ImproveDirection::kBiggerIsBetter);
+ writer->LogResult("measurement2", "-", 0, "frames", false,
+ ImproveDirection::kSmallerIsBetter);
+ writer->LogResult("measurement3", "-", 0, "sigma", false,
+ ImproveDirection::kNone);
+
+ proto::HistogramSet histogram_set;
+ histogram_set.ParseFromString(writer->Serialize());
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+ const proto::Histogram& hist2 = histogram_set.histograms(1);
+ const proto::Histogram& hist3 = histogram_set.histograms(2);
+
+ EXPECT_EQ(hist1.unit().unit(), proto::BYTES_PER_SECOND);
+ EXPECT_EQ(hist1.unit().improvement_direction(), proto::BIGGER_IS_BETTER);
+
+ EXPECT_EQ(hist2.unit().unit(), proto::COUNT);
+ EXPECT_EQ(hist2.unit().improvement_direction(), proto::SMALLER_IS_BETTER);
+
+ EXPECT_EQ(hist3.unit().unit(), proto::SIGMA);
+ EXPECT_EQ(hist3.unit().improvement_direction(), proto::NOT_SPECIFIED);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_result_writer.h b/third_party/libwebrtc/test/testsupport/perf_test_result_writer.h
new file mode 100644
index 0000000000..1b93bc9583
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_result_writer.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_PERF_TEST_RESULT_WRITER_H_
+#define TEST_TESTSUPPORT_PERF_TEST_RESULT_WRITER_H_
+
+#include <stdio.h>
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "test/testsupport/perf_test.h"
+
+namespace webrtc {
+namespace test {
+
+// Interface for classes that write perf results to some kind of JSON format.
+class PerfTestResultWriter {
+ public:
+ virtual ~PerfTestResultWriter() = default;
+
+ virtual void ClearResults() = 0;
+ virtual void LogResult(absl::string_view graph_name,
+ absl::string_view trace_name,
+ double value,
+ absl::string_view units,
+ bool important,
+ webrtc::test::ImproveDirection improve_direction) = 0;
+ virtual void LogResultMeanAndError(
+ absl::string_view graph_name,
+ absl::string_view trace_name,
+ double mean,
+ double error,
+ absl::string_view units,
+ bool important,
+ webrtc::test::ImproveDirection improve_direction) = 0;
+ virtual void LogResultList(
+ absl::string_view graph_name,
+ absl::string_view trace_name,
+ rtc::ArrayView<const double> values,
+ absl::string_view units,
+ bool important,
+ webrtc::test::ImproveDirection improve_direction) = 0;
+
+ virtual std::string Serialize() const = 0;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_PERF_TEST_RESULT_WRITER_H_
diff --git a/third_party/libwebrtc/test/testsupport/perf_test_unittest.cc b/third_party/libwebrtc/test/testsupport/perf_test_unittest.cc
new file mode 100644
index 0000000000..509882db08
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/perf_test_unittest.cc
@@ -0,0 +1,205 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/perf_test.h"
+
+#include <algorithm>
+#include <limits>
+#include <string>
+
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/rtc_expect_death.h"
+
+#if WEBRTC_ENABLE_PROTOBUF
+#include "third_party/catapult/tracing/tracing/value/histogram.h"
+namespace proto = catapult::tracing::tracing::proto;
+#endif
+
+namespace webrtc {
+namespace test {
+
+class PerfTest : public ::testing::Test {
+ protected:
+ void TearDown() override { ClearPerfResults(); }
+};
+
+#if defined(WEBRTC_IOS)
+#define MAYBE_TestPrintResult DISABLED_TestPrintResult
+#else
+#define MAYBE_TestPrintResult TestPrintResult
+#endif
+TEST_F(PerfTest, MAYBE_TestPrintResult) {
+ ::testing::internal::CaptureStdout();
+ std::string expected;
+
+ expected += "RESULT measurementmodifier: trace= 42 units\n";
+ PrintResult("measurement", "modifier", "trace", 42, "units", false);
+
+ expected += "*RESULT foobar: baz_v= 1423730 widgets\n";
+ PrintResult("foo", "bar", "baz_v", 1423730, "widgets", true);
+
+ expected += "RESULT foobar: baz_me= {1,2} lemurs\n";
+ PrintResultMeanAndError("foo", "bar", "baz_me", 1, 2, "lemurs", false);
+
+ const double kListOfScalars[] = {1, 2, 3};
+ expected += "RESULT foobar: baz_vl= [1,2,3] units\n";
+ PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false);
+
+ EXPECT_EQ(expected, ::testing::internal::GetCapturedStdout());
+}
+
+TEST_F(PerfTest, TestClearPerfResults) {
+ PrintResult("measurement", "modifier", "trace", 42, "units", false);
+ ClearPerfResults();
+ EXPECT_EQ("", GetPerfResults());
+}
+
+#if WEBRTC_ENABLE_PROTOBUF
+
+TEST_F(PerfTest, TestGetPerfResultsHistograms) {
+ ClearPerfResults();
+ PrintResult("measurement", "_modifier", "story_1", 42, "ms", false);
+ PrintResult("foo", "bar", "story_1", 7, "sigma", true);
+ // Note: the error will be ignored, not supported by histograms.
+ PrintResultMeanAndError("foo", "bar", "story_1", 1, 2000, "sigma", false);
+ const double kListOfScalars[] = {1, 2, 3};
+ PrintResultList("foo", "bar", "story_1", kListOfScalars, "sigma", false);
+
+ proto::HistogramSet histogram_set;
+ EXPECT_TRUE(histogram_set.ParseFromString(GetPerfResults()))
+ << "Expected valid histogram set";
+
+ ASSERT_EQ(histogram_set.histograms_size(), 2)
+ << "Should be two histograms: foobar and measurement_modifier";
+ const proto::Histogram& hist1 = histogram_set.histograms(0);
+ const proto::Histogram& hist2 = histogram_set.histograms(1);
+
+ EXPECT_EQ(hist1.name(), "foobar");
+
+ // Spot check some things in here (there's a more thorough test on the
+ // histogram writer itself).
+ EXPECT_EQ(hist1.unit().unit(), proto::SIGMA);
+ EXPECT_EQ(hist1.sample_values_size(), 5);
+ EXPECT_EQ(hist1.sample_values(0), 7);
+ EXPECT_EQ(hist1.sample_values(1), 1);
+ EXPECT_EQ(hist1.sample_values(2), 1);
+ EXPECT_EQ(hist1.sample_values(3), 2);
+ EXPECT_EQ(hist1.sample_values(4), 3);
+
+ EXPECT_EQ(hist1.diagnostics().diagnostic_map().count("stories"), 1u);
+ const proto::Diagnostic& stories =
+ hist1.diagnostics().diagnostic_map().at("stories");
+ ASSERT_EQ(stories.generic_set().values_size(), 1);
+ EXPECT_EQ(stories.generic_set().values(0), "\"story_1\"");
+
+ EXPECT_EQ(hist2.name(), "measurement_modifier");
+ EXPECT_EQ(hist2.unit().unit(), proto::MS_BEST_FIT_FORMAT);
+}
+
+TEST_F(PerfTest, TestGetPerfResultsHistogramsWithEmptyCounter) {
+ ClearPerfResults();
+ ::testing::internal::CaptureStdout();
+
+ SamplesStatsCounter empty_counter;
+ PrintResult("measurement", "_modifier", "story", empty_counter, "ms", false);
+
+ proto::HistogramSet histogram_set;
+ EXPECT_TRUE(histogram_set.ParseFromString(GetPerfResults()))
+ << "Expected valid histogram set";
+
+ ASSERT_EQ(histogram_set.histograms_size(), 1)
+ << "Should be one histogram: measurement_modifier";
+ const proto::Histogram& hist = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist.name(), "measurement_modifier");
+
+ // Spot check some things in here (there's a more thorough test on the
+ // histogram writer itself).
+ EXPECT_EQ(hist.unit().unit(), proto::MS_BEST_FIT_FORMAT);
+ EXPECT_EQ(hist.sample_values_size(), 1);
+ EXPECT_EQ(hist.sample_values(0), 0);
+
+ EXPECT_EQ(hist.diagnostics().diagnostic_map().count("stories"), 1u);
+ const proto::Diagnostic& stories =
+ hist.diagnostics().diagnostic_map().at("stories");
+ ASSERT_EQ(stories.generic_set().values_size(), 1);
+ EXPECT_EQ(stories.generic_set().values(0), "\"story\"");
+
+ std::string expected = "RESULT measurement_modifier: story= {0,0} ms\n";
+ EXPECT_EQ(expected, ::testing::internal::GetCapturedStdout());
+}
+
+TEST_F(PerfTest, TestGetPerfResultsHistogramsWithStatsCounter) {
+ ClearPerfResults();
+ ::testing::internal::CaptureStdout();
+
+ SamplesStatsCounter counter;
+ counter.AddSample(1);
+ counter.AddSample(2);
+ counter.AddSample(3);
+ counter.AddSample(4);
+ counter.AddSample(5);
+ PrintResult("measurement", "_modifier", "story", counter, "ms", false);
+
+ proto::HistogramSet histogram_set;
+ EXPECT_TRUE(histogram_set.ParseFromString(GetPerfResults()))
+ << "Expected valid histogram set";
+
+ ASSERT_EQ(histogram_set.histograms_size(), 1)
+ << "Should be one histogram: measurement_modifier";
+ const proto::Histogram& hist = histogram_set.histograms(0);
+
+ EXPECT_EQ(hist.name(), "measurement_modifier");
+
+ // Spot check some things in here (there's a more thorough test on the
+ // histogram writer itself).
+ EXPECT_EQ(hist.unit().unit(), proto::MS_BEST_FIT_FORMAT);
+ EXPECT_EQ(hist.sample_values_size(), 5);
+ EXPECT_THAT(hist.sample_values(), testing::ElementsAre(1, 2, 3, 4, 5));
+
+ EXPECT_EQ(hist.diagnostics().diagnostic_map().count("stories"), 1u);
+ const proto::Diagnostic& stories =
+ hist.diagnostics().diagnostic_map().at("stories");
+ ASSERT_EQ(stories.generic_set().values_size(), 1);
+ EXPECT_EQ(stories.generic_set().values(0), "\"story\"");
+
+ // mean = 3; std = sqrt(2)
+ std::string expected =
+ "RESULT measurement_modifier: story= {3,1.4142136} ms\n";
+ EXPECT_EQ(expected, ::testing::internal::GetCapturedStdout());
+}
+
+#endif // WEBRTC_ENABLE_PROTOBUF
+
+#if GTEST_HAS_DEATH_TEST
+using PerfDeathTest = PerfTest;
+
+TEST_F(PerfDeathTest, TestFiniteResultError) {
+ const double kNan = std::numeric_limits<double>::quiet_NaN();
+ const double kInf = std::numeric_limits<double>::infinity();
+
+ RTC_EXPECT_DEATH(PrintResult("a", "b", "c", kNan, "d", false), "finit");
+ RTC_EXPECT_DEATH(PrintResult("a", "b", "c", kInf, "d", false), "finit");
+
+ RTC_EXPECT_DEATH(PrintResultMeanAndError("a", "b", "c", kNan, 1, "d", false),
+ "");
+ RTC_EXPECT_DEATH(PrintResultMeanAndError("a", "b", "c", 1, kInf, "d", false),
+ "");
+
+ const double kNanList[] = {kNan, kNan};
+ RTC_EXPECT_DEATH(PrintResultList("a", "b", "c", kNanList, "d", false), "");
+ const double kInfList[] = {0, kInf};
+ RTC_EXPECT_DEATH(PrintResultList("a", "b", "c", kInfList, "d", false), "");
+}
+#endif
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/resources_dir_flag.cc b/third_party/libwebrtc/test/testsupport/resources_dir_flag.cc
new file mode 100644
index 0000000000..87a449a401
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/resources_dir_flag.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/resources_dir_flag.h"
+
+#include "absl/flags/flag.h"
+
+ABSL_FLAG(std::string,
+ resources_dir,
+ "",
+ "Where to look for the runtime dependencies. If not specified, we "
+ "will use a reasonable default depending on where we are running. "
+ "This flag is useful if we copy over test resources to a phone and "
+ "need to tell the tests where their resources are.");
diff --git a/third_party/libwebrtc/test/testsupport/resources_dir_flag.h b/third_party/libwebrtc/test/testsupport/resources_dir_flag.h
new file mode 100644
index 0000000000..7d6f192d9b
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/resources_dir_flag.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string>
+
+#ifndef TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
+#define TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
+
+#include "absl/flags/declare.h"
+
+ABSL_DECLARE_FLAG(std::string, resources_dir);
+
+#endif // TEST_TESTSUPPORT_RESOURCES_DIR_FLAG_H__
diff --git a/third_party/libwebrtc/test/testsupport/rtc_expect_death.h b/third_party/libwebrtc/test/testsupport/rtc_expect_death.h
new file mode 100644
index 0000000000..5941e12bd2
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/rtc_expect_death.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_RTC_EXPECT_DEATH_H_
+#define TEST_TESTSUPPORT_RTC_EXPECT_DEATH_H_
+
+#include "test/gtest.h"
+
+#if RTC_CHECK_MSG_ENABLED
+#define RTC_EXPECT_DEATH(statement, regex) EXPECT_DEATH(statement, regex)
+#else
+// If RTC_CHECK messages are disabled we can't validate the failure message.
+#define RTC_EXPECT_DEATH(statement, regex) EXPECT_DEATH(statement, "")
+#endif
+
+#endif // TEST_TESTSUPPORT_RTC_EXPECT_DEATH_H_
diff --git a/third_party/libwebrtc/test/testsupport/test_artifacts.cc b/third_party/libwebrtc/test/testsupport/test_artifacts.cc
new file mode 100644
index 0000000000..6f062e5fe4
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/test_artifacts.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/test_artifacts.h"
+
+#include <string.h>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/system/file_wrapper.h"
+#include "test/testsupport/file_utils.h"
+
+namespace {
+const std::string& DefaultArtifactPath() {
+ static const std::string path = webrtc::test::OutputPath();
+ return path;
+}
+} // namespace
+
+ABSL_FLAG(std::string,
+ test_artifacts_dir,
+ DefaultArtifactPath().c_str(),
+ "The output folder where test output should be saved.");
+
+namespace webrtc {
+namespace test {
+
+bool GetTestArtifactsDir(std::string* out_dir) {
+ if (absl::GetFlag(FLAGS_test_artifacts_dir).empty()) {
+ RTC_LOG(LS_WARNING) << "No test_out_dir defined.";
+ return false;
+ }
+ *out_dir = absl::GetFlag(FLAGS_test_artifacts_dir);
+ return true;
+}
+
+bool WriteToTestArtifactsDir(const char* filename,
+ const uint8_t* buffer,
+ size_t length) {
+ if (absl::GetFlag(FLAGS_test_artifacts_dir).empty()) {
+ RTC_LOG(LS_WARNING) << "No test_out_dir defined.";
+ return false;
+ }
+
+ if (filename == nullptr || strlen(filename) == 0) {
+ RTC_LOG(LS_WARNING) << "filename must be provided.";
+ return false;
+ }
+
+ FileWrapper output = FileWrapper::OpenWriteOnly(
+ JoinFilename(absl::GetFlag(FLAGS_test_artifacts_dir), filename));
+
+ return output.is_open() && output.Write(buffer, length);
+}
+
+bool WriteToTestArtifactsDir(const char* filename, const std::string& content) {
+ return WriteToTestArtifactsDir(
+ filename, reinterpret_cast<const uint8_t*>(content.c_str()),
+ content.length());
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/test_artifacts.h b/third_party/libwebrtc/test/testsupport/test_artifacts.h
new file mode 100644
index 0000000000..ba0d4d39cb
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/test_artifacts.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_TEST_ARTIFACTS_H_
+#define TEST_TESTSUPPORT_TEST_ARTIFACTS_H_
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#include <string>
+
+namespace webrtc {
+namespace test {
+
+// If the test_artifacts_dir flag is set, returns true and copies the location
+// of the dir to `out_dir`. Otherwise, returns false.
+bool GetTestArtifactsDir(std::string* out_dir);
+
+// Writes a `length` bytes array `buffer` to `filename` in isolated output
+// directory defined by swarming. If the file already exists, content will be
+// appended. Otherwise a new file will be created. This function returns false
+// if isolated output directory has not been defined, or `filename` indicates an
+// invalid or non-writable file, or underlying file system errors.
+bool WriteToTestArtifactsDir(const char* filename,
+ const uint8_t* buffer,
+ size_t length);
+
+bool WriteToTestArtifactsDir(const char* filename, const std::string& content);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_TEST_ARTIFACTS_H_
diff --git a/third_party/libwebrtc/test/testsupport/test_artifacts_unittest.cc b/third_party/libwebrtc/test/testsupport/test_artifacts_unittest.cc
new file mode 100644
index 0000000000..fb577610fb
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/test_artifacts_unittest.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/test_artifacts.h"
+
+#include <string.h>
+
+#include <string>
+
+#include "absl/flags/declare.h"
+#include "absl/flags/flag.h"
+#include "rtc_base/system/file_wrapper.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+ABSL_DECLARE_FLAG(std::string, test_artifacts_dir);
+
+namespace webrtc {
+namespace test {
+
+TEST(IsolatedOutputTest, ShouldRejectInvalidIsolatedOutDir) {
+ const std::string backup = absl::GetFlag(FLAGS_test_artifacts_dir);
+ absl::SetFlag(&FLAGS_test_artifacts_dir, "");
+ ASSERT_FALSE(WriteToTestArtifactsDir("a-file", "some-contents"));
+ absl::SetFlag(&FLAGS_test_artifacts_dir, backup);
+}
+
+TEST(IsolatedOutputTest, ShouldRejectInvalidFileName) {
+ ASSERT_FALSE(WriteToTestArtifactsDir(nullptr, "some-contents"));
+ ASSERT_FALSE(WriteToTestArtifactsDir("", "some-contents"));
+}
+
+// Sets isolated_out_dir=<a-writable-path> to execute this test.
+TEST(IsolatedOutputTest, ShouldBeAbleToWriteContent) {
+ const char* filename = "a-file";
+ const char* content = "some-contents";
+ if (WriteToTestArtifactsDir(filename, content)) {
+ std::string out_file =
+ JoinFilename(absl::GetFlag(FLAGS_test_artifacts_dir), filename);
+ FileWrapper input = FileWrapper::OpenReadOnly(out_file);
+ EXPECT_TRUE(input.is_open());
+ EXPECT_TRUE(input.Rewind());
+ uint8_t buffer[32];
+ EXPECT_EQ(input.Read(buffer, strlen(content)), strlen(content));
+ buffer[strlen(content)] = 0;
+ EXPECT_EQ(std::string(content),
+ std::string(reinterpret_cast<char*>(buffer)));
+ input.Close();
+
+ EXPECT_TRUE(RemoveFile(out_file));
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/video_frame_writer.cc b/third_party/libwebrtc/test/testsupport/video_frame_writer.cc
new file mode 100644
index 0000000000..c36ebdeed7
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/video_frame_writer.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/video_frame_writer.h"
+
+#include <cmath>
+#include <cstdlib>
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+rtc::Buffer ExtractI420BufferWithSize(const VideoFrame& frame,
+ int width,
+ int height) {
+ if (frame.width() != width || frame.height() != height) {
+ RTC_CHECK_LE(std::abs(static_cast<double>(width) / height -
+ static_cast<double>(frame.width()) / frame.height()),
+ 2 * std::numeric_limits<double>::epsilon());
+ // Same aspect ratio, no cropping needed.
+ rtc::scoped_refptr<I420Buffer> scaled(I420Buffer::Create(width, height));
+ scaled->ScaleFrom(*frame.video_frame_buffer()->ToI420());
+
+ size_t length =
+ CalcBufferSize(VideoType::kI420, scaled->width(), scaled->height());
+ rtc::Buffer buffer(length);
+ RTC_CHECK_NE(ExtractBuffer(scaled, length, buffer.data()), -1);
+ return buffer;
+ }
+
+ // No resize.
+ size_t length =
+ CalcBufferSize(VideoType::kI420, frame.width(), frame.height());
+ rtc::Buffer buffer(length);
+ RTC_CHECK_NE(ExtractBuffer(frame, length, buffer.data()), -1);
+ return buffer;
+}
+
+} // namespace
+
+Y4mVideoFrameWriterImpl::Y4mVideoFrameWriterImpl(std::string output_file_name,
+ int width,
+ int height,
+ int fps)
+ // We will move string here to prevent extra copy. We won't use const ref
+ // to not corrupt caller variable with move and don't assume that caller's
+ // variable won't be destructed before writer.
+ : width_(width),
+ height_(height),
+ frame_writer_(
+ std::make_unique<Y4mFrameWriterImpl>(std::move(output_file_name),
+ width_,
+ height_,
+ fps)) {
+ // Init underlying frame writer and ensure that it is operational.
+ RTC_CHECK(frame_writer_->Init());
+}
+
+bool Y4mVideoFrameWriterImpl::WriteFrame(const webrtc::VideoFrame& frame) {
+ rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_);
+ RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength());
+ return frame_writer_->WriteFrame(frame_buffer.data());
+}
+
+void Y4mVideoFrameWriterImpl::Close() {
+ frame_writer_->Close();
+}
+
+YuvVideoFrameWriterImpl::YuvVideoFrameWriterImpl(std::string output_file_name,
+ int width,
+ int height)
+ // We will move string here to prevent extra copy. We won't use const ref
+ // to not corrupt caller variable with move and don't assume that caller's
+ // variable won't be destructed before writer.
+ : width_(width),
+ height_(height),
+ frame_writer_(
+ std::make_unique<YuvFrameWriterImpl>(std::move(output_file_name),
+ width_,
+ height_)) {
+ // Init underlying frame writer and ensure that it is operational.
+ RTC_CHECK(frame_writer_->Init());
+}
+
+bool YuvVideoFrameWriterImpl::WriteFrame(const webrtc::VideoFrame& frame) {
+ rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_);
+ RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength());
+ return frame_writer_->WriteFrame(frame_buffer.data());
+}
+
+void YuvVideoFrameWriterImpl::Close() {
+ frame_writer_->Close();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/video_frame_writer.h b/third_party/libwebrtc/test/testsupport/video_frame_writer.h
new file mode 100644
index 0000000000..1cc4e56284
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/video_frame_writer.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_TESTSUPPORT_VIDEO_FRAME_WRITER_H_
+#define TEST_TESTSUPPORT_VIDEO_FRAME_WRITER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/test/video/video_frame_writer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/buffer.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+// Writes webrtc::VideoFrame to specified file with y4m frame writer
+class Y4mVideoFrameWriterImpl : public VideoFrameWriter {
+ public:
+ Y4mVideoFrameWriterImpl(std::string output_file_name,
+ int width,
+ int height,
+ int fps);
+ ~Y4mVideoFrameWriterImpl() override = default;
+
+ bool WriteFrame(const webrtc::VideoFrame& frame) override;
+ void Close() override;
+
+ private:
+ const int width_;
+ const int height_;
+
+ std::unique_ptr<FrameWriter> frame_writer_;
+};
+
+// Writes webrtc::VideoFrame to specified file with yuv frame writer
+class YuvVideoFrameWriterImpl : public VideoFrameWriter {
+ public:
+ YuvVideoFrameWriterImpl(std::string output_file_name, int width, int height);
+ ~YuvVideoFrameWriterImpl() override = default;
+
+ bool WriteFrame(const webrtc::VideoFrame& frame) override;
+ void Close() override;
+
+ private:
+ const int width_;
+ const int height_;
+
+ std::unique_ptr<FrameWriter> frame_writer_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_TESTSUPPORT_VIDEO_FRAME_WRITER_H_
diff --git a/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc b/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc
new file mode 100644
index 0000000000..9d59627c0f
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/video_frame_writer_unittest.cc
@@ -0,0 +1,173 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/video_frame_writer.h"
+
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/test/video/video_frame_writer.h"
+#include "api/video/i420_buffer.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+const size_t kFrameWidth = 50;
+const size_t kFrameHeight = 20;
+const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420.
+const size_t kFrameRate = 30;
+
+// Size of header: "YUV4MPEG2 W50 H20 F30:1 C420\n"
+const size_t kFileHeaderSize = 29;
+// Size of header: "FRAME\n"
+const size_t kFrameHeaderSize = 6;
+
+rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height) {
+ rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width, height));
+ for (int x = 0; x < width; x++) {
+ for (int y = 0; y < height; y++) {
+ buffer->MutableDataY()[x + y * width] = 128;
+ }
+ }
+ int chroma_width = buffer->ChromaWidth();
+ int chroma_height = buffer->ChromaHeight();
+ for (int x = 0; x < chroma_width; x++) {
+ for (int y = 0; y < chroma_height; y++) {
+ buffer->MutableDataU()[x + y * chroma_width] = 1;
+ buffer->MutableDataV()[x + y * chroma_width] = 255;
+ }
+ }
+ return buffer;
+}
+
+void AssertI420BuffersEq(
+ rtc::scoped_refptr<webrtc::I420BufferInterface> actual,
+ rtc::scoped_refptr<webrtc::I420BufferInterface> expected) {
+ ASSERT_TRUE(actual);
+
+ ASSERT_EQ(actual->width(), expected->width());
+ ASSERT_EQ(actual->height(), expected->height());
+ const int width = expected->width();
+ const int height = expected->height();
+ for (int x = 0; x < width; x++) {
+ for (int y = 0; y < height; y++) {
+ ASSERT_EQ(actual->DataY()[x + y * width],
+ expected->DataY()[x + y * width]);
+ }
+ }
+
+ ASSERT_EQ(actual->ChromaWidth(), expected->ChromaWidth());
+ ASSERT_EQ(actual->ChromaHeight(), expected->ChromaHeight());
+ int chroma_width = expected->ChromaWidth();
+ int chroma_height = expected->ChromaHeight();
+ for (int x = 0; x < chroma_width; x++) {
+ for (int y = 0; y < chroma_height; y++) {
+ ASSERT_EQ(actual->DataU()[x + y * chroma_width],
+ expected->DataU()[x + y * chroma_width]);
+ ASSERT_EQ(actual->DataV()[x + y * chroma_width],
+ expected->DataV()[x + y * chroma_width]);
+ }
+ }
+}
+
+} // namespace
+
+class VideoFrameWriterTest : public ::testing::Test {
+ protected:
+ VideoFrameWriterTest() = default;
+ ~VideoFrameWriterTest() override = default;
+
+ void SetUp() override {
+ temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "video_frame_writer_unittest");
+ frame_writer_ = CreateFrameWriter();
+ }
+
+ virtual std::unique_ptr<VideoFrameWriter> CreateFrameWriter() = 0;
+
+ void TearDown() override { remove(temp_filename_.c_str()); }
+
+ std::unique_ptr<VideoFrameWriter> frame_writer_;
+ std::string temp_filename_;
+};
+
+class Y4mVideoFrameWriterTest : public VideoFrameWriterTest {
+ protected:
+ std::unique_ptr<VideoFrameWriter> CreateFrameWriter() override {
+ return std::make_unique<Y4mVideoFrameWriterImpl>(
+ temp_filename_, kFrameWidth, kFrameHeight, kFrameRate);
+ }
+};
+
+class YuvVideoFrameWriterTest : public VideoFrameWriterTest {
+ protected:
+ std::unique_ptr<VideoFrameWriter> CreateFrameWriter() override {
+ return std::make_unique<YuvVideoFrameWriterImpl>(temp_filename_,
+ kFrameWidth, kFrameHeight);
+ }
+};
+
+TEST_F(Y4mVideoFrameWriterTest, InitSuccess) {}
+
+TEST_F(Y4mVideoFrameWriterTest, WriteFrame) {
+ rtc::scoped_refptr<I420Buffer> expected_buffer =
+ CreateI420Buffer(kFrameWidth, kFrameHeight);
+
+ VideoFrame frame =
+ VideoFrame::Builder().set_video_frame_buffer(expected_buffer).build();
+
+ ASSERT_TRUE(frame_writer_->WriteFrame(frame));
+ ASSERT_TRUE(frame_writer_->WriteFrame(frame));
+
+ frame_writer_->Close();
+ EXPECT_EQ(kFileHeaderSize + 2 * kFrameHeaderSize + 2 * kFrameLength,
+ GetFileSize(temp_filename_));
+
+ std::unique_ptr<FrameReader> frame_reader =
+ CreateY4mFrameReader(temp_filename_);
+ AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer);
+ AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer);
+ EXPECT_FALSE(frame_reader->PullFrame()); // End of file.
+}
+
+TEST_F(YuvVideoFrameWriterTest, InitSuccess) {}
+
+TEST_F(YuvVideoFrameWriterTest, WriteFrame) {
+ rtc::scoped_refptr<I420Buffer> expected_buffer =
+ CreateI420Buffer(kFrameWidth, kFrameHeight);
+
+ VideoFrame frame =
+ VideoFrame::Builder().set_video_frame_buffer(expected_buffer).build();
+
+ ASSERT_TRUE(frame_writer_->WriteFrame(frame));
+ ASSERT_TRUE(frame_writer_->WriteFrame(frame));
+
+ frame_writer_->Close();
+ EXPECT_EQ(2 * kFrameLength, GetFileSize(temp_filename_));
+
+ std::unique_ptr<FrameReader> frame_reader = CreateYuvFrameReader(
+ temp_filename_,
+ Resolution({.width = kFrameWidth, .height = kFrameHeight}));
+ AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer);
+ AssertI420BuffersEq(frame_reader->PullFrame(), expected_buffer);
+ EXPECT_FALSE(frame_reader->PullFrame()); // End of file.
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc
new file mode 100644
index 0000000000..72fb9b5188
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/y4m_frame_reader.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+constexpr int kFrameHeaderSize = 6; // "FRAME\n"
+} // namespace
+
+void ParseY4mHeader(std::string filepath,
+ Resolution* resolution,
+ int* header_size) {
+ FILE* file = fopen(filepath.c_str(), "r");
+ RTC_CHECK(file != NULL) << "Cannot open " << filepath;
+
+ // Length of Y4M header is technically unlimited due to the comment tag 'X'.
+ char h[1024];
+ RTC_CHECK(fgets(h, sizeof(h), file) != NULL)
+ << "File " << filepath << " is too small";
+ fclose(file);
+
+ RTC_CHECK(sscanf(h, "YUV4MPEG2 W%d H%d", &resolution->width,
+ &resolution->height) == 2)
+ << filepath << " is not a valid Y4M file";
+
+ RTC_CHECK_GT(resolution->width, 0) << "Width must be positive";
+ RTC_CHECK_GT(resolution->height, 0) << "Height must be positive";
+
+ *header_size = strcspn(h, "\n") + 1;
+ RTC_CHECK(static_cast<unsigned>(*header_size) < sizeof(h))
+ << filepath << " has unexpectedly large header";
+}
+
+Y4mFrameReaderImpl::Y4mFrameReaderImpl(std::string filepath,
+ RepeatMode repeat_mode)
+ : YuvFrameReaderImpl(filepath, Resolution(), repeat_mode) {}
+
+void Y4mFrameReaderImpl::Init() {
+ file_ = fopen(filepath_.c_str(), "rb");
+ RTC_CHECK(file_ != nullptr) << "Cannot open " << filepath_;
+
+ ParseY4mHeader(filepath_, &resolution_, &header_size_bytes_);
+ frame_size_bytes_ =
+ CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height);
+ frame_size_bytes_ += kFrameHeaderSize;
+
+ size_t file_size_bytes = GetFileSize(filepath_);
+ RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty";
+ RTC_CHECK_GT(file_size_bytes, header_size_bytes_)
+ << "File " << filepath_ << " is too small";
+
+ num_frames_ = static_cast<int>((file_size_bytes - header_size_bytes_) /
+ frame_size_bytes_);
+ RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small";
+ header_size_bytes_ += kFrameHeaderSize;
+}
+
+std::unique_ptr<FrameReader> CreateY4mFrameReader(std::string filepath) {
+ return CreateY4mFrameReader(filepath,
+ YuvFrameReaderImpl::RepeatMode::kSingle);
+}
+
+std::unique_ptr<FrameReader> CreateY4mFrameReader(
+ std::string filepath,
+ YuvFrameReaderImpl::RepeatMode repeat_mode) {
+ Y4mFrameReaderImpl* frame_reader =
+ new Y4mFrameReaderImpl(filepath, repeat_mode);
+ frame_reader->Init();
+ return std::unique_ptr<FrameReader>(frame_reader);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc
new file mode 100644
index 0000000000..df81a8135b
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/y4m_frame_reader_unittest.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+using Ratio = FrameReader::Ratio;
+using RepeatMode = YuvFrameReaderImpl::RepeatMode;
+
+constexpr Resolution kResolution({.width = 1, .height = 1});
+constexpr char kFileHeader[] = "YUV4MPEG2 W1 H1 F30:1 C420\n";
+constexpr char kFrameHeader[] = "FRAME\n";
+constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}};
+constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]);
+} // namespace
+
+class Y4mFrameReaderTest : public ::testing::Test {
+ protected:
+ Y4mFrameReaderTest() = default;
+ ~Y4mFrameReaderTest() override = default;
+
+ void SetUp() override {
+ filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "y4m_frame_reader_unittest");
+ FILE* file = fopen(filepath_.c_str(), "wb");
+ fwrite(kFileHeader, 1, sizeof(kFileHeader) - 1, file);
+ for (int n = 0; n < kNumFrames; ++n) {
+ fwrite(kFrameHeader, 1, sizeof(kFrameHeader) - 1, file);
+ fwrite(kFrameContent[n], 1, sizeof(kFrameContent[n]), file);
+ }
+ fclose(file);
+
+ reader_ = CreateY4mFrameReader(filepath_);
+ }
+
+ void TearDown() override { remove(filepath_.c_str()); }
+
+ std::string filepath_;
+ std::unique_ptr<FrameReader> reader_;
+};
+
+TEST_F(Y4mFrameReaderTest, num_frames) {
+ EXPECT_EQ(kNumFrames, reader_->num_frames());
+}
+
+TEST_F(Y4mFrameReaderTest, PullFrame_frameResolution) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame();
+ EXPECT_EQ(kResolution.width, buffer->width());
+ EXPECT_EQ(kResolution.height, buffer->height());
+}
+
+TEST_F(Y4mFrameReaderTest, PullFrame_frameContent) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame();
+ EXPECT_EQ(kFrameContent[0][0], *buffer->DataY());
+ EXPECT_EQ(kFrameContent[0][1], *buffer->DataU());
+ EXPECT_EQ(kFrameContent[0][2], *buffer->DataV());
+}
+
+TEST_F(Y4mFrameReaderTest, ReadFrame_randomOrder) {
+ std::vector<int> expected_frames = {2, 0, 1};
+ std::vector<int> actual_frames;
+ for (int frame_num : expected_frames) {
+ rtc::scoped_refptr<I420BufferInterface> buffer =
+ reader_->ReadFrame(frame_num);
+ actual_frames.push_back(*buffer->DataY());
+ }
+ EXPECT_EQ(expected_frames, actual_frames);
+}
+
+TEST_F(Y4mFrameReaderTest, PullFrame_scale) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame(
+ /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}),
+ FrameReader::kNoScale);
+ EXPECT_EQ(2, buffer->width());
+ EXPECT_EQ(2, buffer->height());
+}
+
+class Y4mFrameReaderRepeatModeTest
+ : public Y4mFrameReaderTest,
+ public ::testing::WithParamInterface<
+ std::tuple<RepeatMode, std::vector<int>>> {};
+
+TEST_P(Y4mFrameReaderRepeatModeTest, PullFrame) {
+ RepeatMode mode = std::get<0>(GetParam());
+ std::vector<int> expected_frames = std::get<1>(GetParam());
+
+ reader_ = CreateY4mFrameReader(filepath_, mode);
+ std::vector<int> read_frames;
+ for (size_t i = 0; i < expected_frames.size(); ++i) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame();
+ read_frames.push_back(*buffer->DataY());
+ }
+ EXPECT_EQ(expected_frames, read_frames);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ Y4mFrameReaderTest,
+ Y4mFrameReaderRepeatModeTest,
+ ::testing::ValuesIn(
+ {std::make_tuple(RepeatMode::kSingle, std::vector<int>{0, 1, 2}),
+ std::make_tuple(RepeatMode::kRepeat,
+ std::vector<int>{0, 1, 2, 0, 1, 2}),
+ std::make_tuple(RepeatMode::kPingPong,
+ std::vector<int>{0, 1, 2, 1, 0, 1, 2})}));
+
+class Y4mFrameReaderFramerateScaleTest
+ : public Y4mFrameReaderTest,
+ public ::testing::WithParamInterface<
+ std::tuple<Ratio, std::vector<int>>> {};
+
+TEST_P(Y4mFrameReaderFramerateScaleTest, PullFrame) {
+ Ratio framerate_scale = std::get<0>(GetParam());
+ std::vector<int> expected_frames = std::get<1>(GetParam());
+
+ std::vector<int> actual_frames;
+ for (size_t i = 0; i < expected_frames.size(); ++i) {
+ int pulled_frame;
+ rtc::scoped_refptr<I420BufferInterface> buffer =
+ reader_->PullFrame(&pulled_frame, kResolution, framerate_scale);
+ actual_frames.push_back(pulled_frame);
+ }
+ EXPECT_EQ(expected_frames, actual_frames);
+}
+
+INSTANTIATE_TEST_SUITE_P(Y4mFrameReaderTest,
+ Y4mFrameReaderFramerateScaleTest,
+ ::testing::ValuesIn({
+ std::make_tuple(Ratio({.num = 1, .den = 2}),
+ std::vector<int>{0, 2, 4}),
+ std::make_tuple(Ratio({.num = 2, .den = 3}),
+ std::vector<int>{0, 1, 3, 4, 6}),
+ std::make_tuple(Ratio({.num = 2, .den = 1}),
+ std::vector<int>{0, 0, 1, 1}),
+ }));
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_writer.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_writer.cc
new file mode 100644
index 0000000000..1bb4543963
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/y4m_frame_writer.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <string>
+
+#include "rtc_base/logging.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+Y4mFrameWriterImpl::Y4mFrameWriterImpl(std::string output_filename,
+ int width,
+ int height,
+ int frame_rate)
+ : YuvFrameWriterImpl(output_filename, width, height),
+ frame_rate_(frame_rate) {}
+
+Y4mFrameWriterImpl::~Y4mFrameWriterImpl() = default;
+
+bool Y4mFrameWriterImpl::Init() {
+ if (!YuvFrameWriterImpl::Init()) {
+ return false;
+ }
+ int bytes_written = fprintf(output_file_, "YUV4MPEG2 W%d H%d F%d:1 C420\n",
+ width_, height_, frame_rate_);
+ if (bytes_written < 0) {
+ RTC_LOG(LS_ERROR) << "Failed to write Y4M file header to file: "
+ << output_filename_.c_str();
+ return false;
+ }
+ return true;
+}
+
+bool Y4mFrameWriterImpl::WriteFrame(const uint8_t* frame_buffer) {
+ if (output_file_ == nullptr) {
+ RTC_LOG(LS_ERROR) << "Y4mFrameWriterImpl is not initialized.";
+ return false;
+ }
+ int bytes_written = fprintf(output_file_, "FRAME\n");
+ if (bytes_written < 0) {
+ RTC_LOG(LS_ERROR) << "Couldn't write Y4M frame header to file: "
+ << output_filename_.c_str();
+ return false;
+ }
+ return YuvFrameWriterImpl::WriteFrame(frame_buffer);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_writer_unittest.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_writer_unittest.cc
new file mode 100644
index 0000000000..f12a4b8e4f
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/y4m_frame_writer_unittest.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
+#include <memory>
+#include <string>
+
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+const size_t kFrameWidth = 50;
+const size_t kFrameHeight = 20;
+const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420.
+const size_t kFrameRate = 30;
+
+const std::string kFileHeader = "YUV4MPEG2 W50 H20 F30:1 C420\n";
+const std::string kFrameHeader = "FRAME\n";
+} // namespace
+
+class Y4mFrameWriterTest : public ::testing::Test {
+ protected:
+ Y4mFrameWriterTest() = default;
+ ~Y4mFrameWriterTest() override = default;
+
+ void SetUp() override {
+ temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "y4m_frame_writer_unittest");
+ frame_writer_.reset(new Y4mFrameWriterImpl(temp_filename_, kFrameWidth,
+ kFrameHeight, kFrameRate));
+ ASSERT_TRUE(frame_writer_->Init());
+ }
+
+ void TearDown() override { remove(temp_filename_.c_str()); }
+
+ std::unique_ptr<FrameWriter> frame_writer_;
+ std::string temp_filename_;
+};
+
+TEST_F(Y4mFrameWriterTest, InitSuccess) {}
+
+TEST_F(Y4mFrameWriterTest, FrameLength) {
+ EXPECT_EQ(kFrameLength, frame_writer_->FrameLength());
+}
+
+TEST_F(Y4mFrameWriterTest, WriteFrame) {
+ uint8_t buffer[kFrameLength];
+ memset(buffer, 9, kFrameLength); // Write lots of 9s to the buffer.
+ bool result = frame_writer_->WriteFrame(buffer);
+ ASSERT_TRUE(result);
+ result = frame_writer_->WriteFrame(buffer);
+ ASSERT_TRUE(result);
+
+ frame_writer_->Close();
+ EXPECT_EQ(kFileHeader.size() + 2 * kFrameHeader.size() + 2 * kFrameLength,
+ GetFileSize(temp_filename_));
+}
+
+TEST_F(Y4mFrameWriterTest, WriteFrameUninitialized) {
+ uint8_t buffer[kFrameLength];
+ Y4mFrameWriterImpl frame_writer(temp_filename_, kFrameWidth, kFrameHeight,
+ kFrameRate);
+ EXPECT_FALSE(frame_writer.WriteFrame(buffer));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc
new file mode 100644
index 0000000000..02c1a68008
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/yuv_frame_reader.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/logging.h"
+#include "test/frame_utils.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+using RepeatMode = YuvFrameReaderImpl::RepeatMode;
+
+int WrapFrameNum(int frame_num, int num_frames, RepeatMode mode) {
+ RTC_CHECK_GE(frame_num, 0) << "frame_num cannot be negative";
+ RTC_CHECK_GT(num_frames, 0) << "num_frames must be greater than 0";
+ if (mode == RepeatMode::kSingle) {
+ return frame_num;
+ }
+ if (mode == RepeatMode::kRepeat) {
+ return frame_num % num_frames;
+ }
+
+ RTC_CHECK_EQ(RepeatMode::kPingPong, mode);
+ int cycle_len = 2 * (num_frames - 1);
+ int wrapped_num = frame_num % cycle_len;
+ if (wrapped_num >= num_frames) {
+ return cycle_len - wrapped_num;
+ }
+ return wrapped_num;
+}
+
+rtc::scoped_refptr<I420Buffer> Scale(rtc::scoped_refptr<I420Buffer> buffer,
+ Resolution resolution) {
+ if (buffer->width() == resolution.width &&
+ buffer->height() == resolution.height) {
+ return buffer;
+ }
+ rtc::scoped_refptr<I420Buffer> scaled(
+ I420Buffer::Create(resolution.width, resolution.height));
+ scaled->ScaleFrom(*buffer.get());
+ return scaled;
+}
+} // namespace
+
+int YuvFrameReaderImpl::RateScaler::Skip(Ratio framerate_scale) {
+ ticks_ = ticks_.value_or(framerate_scale.num);
+ int skip = 0;
+ while (ticks_ <= 0) {
+ *ticks_ += framerate_scale.num;
+ ++skip;
+ }
+ *ticks_ -= framerate_scale.den;
+ return skip;
+}
+
+YuvFrameReaderImpl::YuvFrameReaderImpl(std::string filepath,
+ Resolution resolution,
+ RepeatMode repeat_mode)
+ : filepath_(filepath),
+ resolution_(resolution),
+ repeat_mode_(repeat_mode),
+ num_frames_(0),
+ frame_num_(0),
+ frame_size_bytes_(0),
+ header_size_bytes_(0),
+ file_(nullptr) {}
+
+YuvFrameReaderImpl::~YuvFrameReaderImpl() {
+ if (file_ != nullptr) {
+ fclose(file_);
+ file_ = nullptr;
+ }
+}
+
+void YuvFrameReaderImpl::Init() {
+ RTC_CHECK_GT(resolution_.width, 0) << "Width must be positive";
+ RTC_CHECK_GT(resolution_.height, 0) << "Height must be positive";
+ frame_size_bytes_ =
+ CalcBufferSize(VideoType::kI420, resolution_.width, resolution_.height);
+
+ file_ = fopen(filepath_.c_str(), "rb");
+ RTC_CHECK(file_ != NULL) << "Cannot open " << filepath_;
+
+ size_t file_size_bytes = GetFileSize(filepath_);
+ RTC_CHECK_GT(file_size_bytes, 0u) << "File " << filepath_ << " is empty";
+
+ num_frames_ = static_cast<int>(file_size_bytes / frame_size_bytes_);
+ RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small";
+}
+
+rtc::scoped_refptr<I420Buffer> YuvFrameReaderImpl::PullFrame() {
+ return PullFrame(/*frame_num=*/nullptr);
+}
+
+rtc::scoped_refptr<I420Buffer> YuvFrameReaderImpl::PullFrame(int* frame_num) {
+ return PullFrame(frame_num, resolution_, /*framerate_scale=*/kNoScale);
+}
+
+rtc::scoped_refptr<I420Buffer> YuvFrameReaderImpl::PullFrame(
+ int* frame_num,
+ Resolution resolution,
+ Ratio framerate_scale) {
+ frame_num_ += framerate_scaler_.Skip(framerate_scale);
+ auto buffer = ReadFrame(frame_num_, resolution);
+ if (frame_num != nullptr) {
+ *frame_num = frame_num_;
+ }
+ return buffer;
+}
+
+rtc::scoped_refptr<I420Buffer> YuvFrameReaderImpl::ReadFrame(int frame_num) {
+ return ReadFrame(frame_num, resolution_);
+}
+
+rtc::scoped_refptr<I420Buffer> YuvFrameReaderImpl::ReadFrame(
+ int frame_num,
+ Resolution resolution) {
+ int wrapped_num = WrapFrameNum(frame_num, num_frames_, repeat_mode_);
+ if (wrapped_num >= num_frames_) {
+ RTC_CHECK_EQ(RepeatMode::kSingle, repeat_mode_);
+ return nullptr;
+ }
+ fseek(file_, header_size_bytes_ + wrapped_num * frame_size_bytes_, SEEK_SET);
+ auto buffer = ReadI420Buffer(resolution_.width, resolution_.height, file_);
+ RTC_CHECK(buffer != nullptr);
+
+ return Scale(buffer, resolution);
+}
+
+std::unique_ptr<FrameReader> CreateYuvFrameReader(std::string filepath,
+ Resolution resolution) {
+ return CreateYuvFrameReader(filepath, resolution,
+ YuvFrameReaderImpl::RepeatMode::kSingle);
+}
+
+std::unique_ptr<FrameReader> CreateYuvFrameReader(
+ std::string filepath,
+ Resolution resolution,
+ YuvFrameReaderImpl::RepeatMode repeat_mode) {
+ YuvFrameReaderImpl* frame_reader =
+ new YuvFrameReaderImpl(filepath, resolution, repeat_mode);
+ frame_reader->Init();
+ return std::unique_ptr<FrameReader>(frame_reader);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc
new file mode 100644
index 0000000000..b9ea2d0c46
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/yuv_frame_reader_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_reader.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+using Ratio = FrameReader::Ratio;
+using RepeatMode = YuvFrameReaderImpl::RepeatMode;
+
+constexpr Resolution kResolution({.width = 1, .height = 1});
+constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}};
+constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]);
+} // namespace
+
+class YuvFrameReaderTest : public ::testing::Test {
+ protected:
+ YuvFrameReaderTest() = default;
+ ~YuvFrameReaderTest() override = default;
+
+ void SetUp() override {
+ filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "yuv_frame_reader_unittest");
+ FILE* file = fopen(filepath_.c_str(), "wb");
+ fwrite(kFrameContent, 1, sizeof(kFrameContent), file);
+ fclose(file);
+
+ reader_ = CreateYuvFrameReader(filepath_, kResolution);
+ }
+
+ void TearDown() override { remove(filepath_.c_str()); }
+
+ std::string filepath_;
+ std::unique_ptr<FrameReader> reader_;
+};
+
+TEST_F(YuvFrameReaderTest, num_frames) {
+ EXPECT_EQ(kNumFrames, reader_->num_frames());
+}
+
+TEST_F(YuvFrameReaderTest, PullFrame_frameContent) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame();
+ EXPECT_EQ(kFrameContent[0][0], *buffer->DataY());
+ EXPECT_EQ(kFrameContent[0][1], *buffer->DataU());
+ EXPECT_EQ(kFrameContent[0][2], *buffer->DataV());
+}
+
+TEST_F(YuvFrameReaderTest, ReadFrame_randomOrder) {
+ std::vector<int> expected_frames = {2, 0, 1};
+ std::vector<int> actual_frames;
+ for (int frame_num : expected_frames) {
+ rtc::scoped_refptr<I420BufferInterface> buffer =
+ reader_->ReadFrame(frame_num);
+ actual_frames.push_back(*buffer->DataY());
+ }
+ EXPECT_EQ(expected_frames, actual_frames);
+}
+
+TEST_F(YuvFrameReaderTest, PullFrame_scale) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame(
+ /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}),
+ FrameReader::kNoScale);
+ EXPECT_EQ(2, buffer->width());
+ EXPECT_EQ(2, buffer->height());
+}
+
+class YuvFrameReaderRepeatModeTest
+ : public YuvFrameReaderTest,
+ public ::testing::WithParamInterface<
+ std::tuple<RepeatMode, std::vector<int>>> {};
+
+TEST_P(YuvFrameReaderRepeatModeTest, PullFrame) {
+ RepeatMode mode = std::get<0>(GetParam());
+ std::vector<int> expected_frames = std::get<1>(GetParam());
+
+ reader_ = CreateYuvFrameReader(filepath_, kResolution, mode);
+ std::vector<int> read_frames;
+ for (size_t i = 0; i < expected_frames.size(); ++i) {
+ rtc::scoped_refptr<I420BufferInterface> buffer = reader_->PullFrame();
+ read_frames.push_back(*buffer->DataY());
+ }
+ EXPECT_EQ(expected_frames, read_frames);
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ YuvFrameReaderTest,
+ YuvFrameReaderRepeatModeTest,
+ ::testing::ValuesIn(
+ {std::make_tuple(RepeatMode::kSingle, std::vector<int>{0, 1, 2}),
+ std::make_tuple(RepeatMode::kRepeat,
+ std::vector<int>{0, 1, 2, 0, 1, 2}),
+ std::make_tuple(RepeatMode::kPingPong,
+ std::vector<int>{0, 1, 2, 1, 0, 1, 2})}));
+
+class YuvFrameReaderFramerateScaleTest
+ : public YuvFrameReaderTest,
+ public ::testing::WithParamInterface<
+ std::tuple<Ratio, std::vector<int>>> {};
+
+TEST_P(YuvFrameReaderFramerateScaleTest, PullFrame) {
+ Ratio framerate_scale = std::get<0>(GetParam());
+ std::vector<int> expected_frames = std::get<1>(GetParam());
+
+ std::vector<int> actual_frames;
+ for (size_t i = 0; i < expected_frames.size(); ++i) {
+ int pulled_frame;
+ rtc::scoped_refptr<I420BufferInterface> buffer =
+ reader_->PullFrame(&pulled_frame, kResolution, framerate_scale);
+ actual_frames.push_back(pulled_frame);
+ }
+ EXPECT_EQ(expected_frames, actual_frames);
+}
+
+INSTANTIATE_TEST_SUITE_P(YuvFrameReaderTest,
+ YuvFrameReaderFramerateScaleTest,
+ ::testing::ValuesIn({
+ std::make_tuple(Ratio({.num = 1, .den = 2}),
+ std::vector<int>{0, 2, 4}),
+ std::make_tuple(Ratio({.num = 2, .den = 3}),
+ std::vector<int>{0, 1, 3, 4, 6}),
+ std::make_tuple(Ratio({.num = 2, .den = 1}),
+ std::vector<int>{0, 0, 1, 1}),
+ }));
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_writer.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_writer.cc
new file mode 100644
index 0000000000..e5e0a6ba7f
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/yuv_frame_writer.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <string>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+YuvFrameWriterImpl::YuvFrameWriterImpl(std::string output_filename,
+ int width,
+ int height)
+ : output_filename_(output_filename),
+ frame_length_in_bytes_(0),
+ width_(width),
+ height_(height),
+ output_file_(nullptr) {}
+
+YuvFrameWriterImpl::~YuvFrameWriterImpl() {
+ Close();
+}
+
+bool YuvFrameWriterImpl::Init() {
+ if (width_ <= 0 || height_ <= 0) {
+ RTC_LOG(LS_ERROR) << "Frame width and height must be positive.";
+ return false;
+ }
+ frame_length_in_bytes_ =
+ width_ * height_ + 2 * ((width_ + 1) / 2) * ((height_ + 1) / 2);
+
+ output_file_ = fopen(output_filename_.c_str(), "wb");
+ if (output_file_ == nullptr) {
+ RTC_LOG(LS_ERROR) << "Couldn't open output file: "
+ << output_filename_.c_str();
+ return false;
+ }
+ return true;
+}
+
+bool YuvFrameWriterImpl::WriteFrame(const uint8_t* frame_buffer) {
+ RTC_DCHECK(frame_buffer);
+ if (output_file_ == nullptr) {
+ RTC_LOG(LS_ERROR) << "YuvFrameWriterImpl is not initialized.";
+ return false;
+ }
+ size_t bytes_written =
+ fwrite(frame_buffer, 1, frame_length_in_bytes_, output_file_);
+ if (bytes_written != frame_length_in_bytes_) {
+    RTC_LOG(LS_ERROR) << "Couldn't write frame to file: "
+ << output_filename_.c_str();
+ return false;
+ }
+ return true;
+}
+
+void YuvFrameWriterImpl::Close() {
+ if (output_file_ != nullptr) {
+ fclose(output_file_);
+ output_file_ = nullptr;
+ }
+}
+
+size_t YuvFrameWriterImpl::FrameLength() {
+ return frame_length_in_bytes_;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/testsupport/yuv_frame_writer_unittest.cc b/third_party/libwebrtc/test/testsupport/yuv_frame_writer_unittest.cc
new file mode 100644
index 0000000000..13ed715b9e
--- /dev/null
+++ b/third_party/libwebrtc/test/testsupport/yuv_frame_writer_unittest.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
+#include <memory>
+#include <string>
+
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+#include "test/testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+const size_t kFrameWidth = 50;
+const size_t kFrameHeight = 20;
+const size_t kFrameLength = 3 * kFrameWidth * kFrameHeight / 2; // I420.
+} // namespace
+
+class YuvFrameWriterTest : public ::testing::Test {
+ protected:
+ YuvFrameWriterTest() = default;
+ ~YuvFrameWriterTest() override = default;
+
+ void SetUp() override {
+ temp_filename_ = webrtc::test::TempFilename(webrtc::test::OutputPath(),
+ "yuv_frame_writer_unittest");
+ frame_writer_.reset(
+ new YuvFrameWriterImpl(temp_filename_, kFrameWidth, kFrameHeight));
+ ASSERT_TRUE(frame_writer_->Init());
+ }
+
+ void TearDown() override { remove(temp_filename_.c_str()); }
+
+ std::unique_ptr<FrameWriter> frame_writer_;
+ std::string temp_filename_;
+};
+
+TEST_F(YuvFrameWriterTest, InitSuccess) {}
+
+TEST_F(YuvFrameWriterTest, FrameLength) {
+ EXPECT_EQ(kFrameLength, frame_writer_->FrameLength());
+}
+
+TEST_F(YuvFrameWriterTest, WriteFrame) {
+ uint8_t buffer[kFrameLength];
+ memset(buffer, 9, kFrameLength); // Write lots of 9s to the buffer.
+ bool result = frame_writer_->WriteFrame(buffer);
+ ASSERT_TRUE(result);
+
+ frame_writer_->Close();
+ EXPECT_EQ(kFrameLength, GetFileSize(temp_filename_));
+}
+
+TEST_F(YuvFrameWriterTest, WriteFrameUninitialized) {
+ uint8_t buffer[kFrameLength];
+ YuvFrameWriterImpl frame_writer(temp_filename_, kFrameWidth, kFrameHeight);
+ EXPECT_FALSE(frame_writer.WriteFrame(buffer));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/BUILD.gn b/third_party/libwebrtc/test/time_controller/BUILD.gn
new file mode 100644
index 0000000000..b4b368a42a
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/BUILD.gn
@@ -0,0 +1,70 @@
+# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_library("time_controller") {
+ testonly = true
+ sources = [
+ "external_time_controller.cc",
+ "external_time_controller.h",
+ "real_time_controller.cc",
+ "real_time_controller.h",
+ "simulated_task_queue.cc",
+ "simulated_task_queue.h",
+ "simulated_thread.cc",
+ "simulated_thread.h",
+ "simulated_time_controller.cc",
+ "simulated_time_controller.h",
+ ]
+
+ deps = [
+ "../../api:sequence_checker",
+ "../../api:time_controller",
+ "../../api/task_queue",
+ "../../api/task_queue:default_task_queue_factory",
+ "../../api/units:time_delta",
+ "../../api/units:timestamp",
+ "../../rtc_base:checks",
+ "../../rtc_base:null_socket_server",
+ "../../rtc_base:platform_thread_types",
+ "../../rtc_base:rtc_base_tests_utils",
+ "../../rtc_base:rtc_event",
+ "../../rtc_base/synchronization:mutex",
+ "../../rtc_base/synchronization:yield_policy",
+ "../../system_wrappers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/functional:any_invocable",
+ "//third_party/abseil-cpp/absl/strings",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_library("time_controller_unittests") {
+ testonly = true
+ sources = [
+ "external_time_controller_unittest.cc",
+ "simulated_time_controller_unittest.cc",
+ "time_controller_conformance_test.cc",
+ ]
+ deps = [
+ ":time_controller",
+ "../:test_support",
+ "../../api:time_controller",
+ "../../api/units:time_delta",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:rtc_event",
+ "../../rtc_base:rtc_task_queue",
+ "../../rtc_base:task_queue_for_test",
+ "../../rtc_base:threading",
+ "../../rtc_base/synchronization:mutex",
+ "../../rtc_base/task_utils:repeating_task",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/test/time_controller/external_time_controller.cc b/third_party/libwebrtc/test/time_controller/external_time_controller.cc
new file mode 100644
index 0000000000..f652eb686c
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/external_time_controller.cc
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/time_controller/external_time_controller.h"
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <utility>
+
+#include "absl/functional/any_invocable.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/synchronization/yield_policy.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+// Wraps a TaskQueue so that it can reschedule the time controller whenever
+// an external call schedules a new task.
+class ExternalTimeController::TaskQueueWrapper : public TaskQueueBase {
+ public:
+ TaskQueueWrapper(ExternalTimeController* parent,
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> base)
+ : parent_(parent), base_(std::move(base)) {}
+
+ void PostTask(absl::AnyInvocable<void() &&> task) override {
+ parent_->UpdateTime();
+ base_->PostTask(TaskWrapper(std::move(task)));
+ parent_->ScheduleNext();
+ }
+
+ void PostDelayedTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override {
+ parent_->UpdateTime();
+ base_->PostDelayedTask(TaskWrapper(std::move(task)), delay);
+ parent_->ScheduleNext();
+ }
+
+ void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override {
+ parent_->UpdateTime();
+ base_->PostDelayedHighPrecisionTask(TaskWrapper(std::move(task)), delay);
+ parent_->ScheduleNext();
+ }
+
+ void Delete() override { delete this; }
+
+ private:
+ absl::AnyInvocable<void() &&> TaskWrapper(
+ absl::AnyInvocable<void() &&> task) {
+ return [task = std::move(task), this]() mutable {
+ CurrentTaskQueueSetter current(this);
+ std::move(task)();
+ };
+ }
+
+ ExternalTimeController* const parent_;
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> base_;
+};
+
+ExternalTimeController::ExternalTimeController(ControlledAlarmClock* alarm)
+ : alarm_(alarm),
+ impl_(alarm_->GetClock()->CurrentTime()),
+ yield_policy_(&impl_) {
+ global_clock_.SetTime(alarm_->GetClock()->CurrentTime());
+ alarm_->SetCallback([this] { Run(); });
+}
+
+Clock* ExternalTimeController::GetClock() {
+ return alarm_->GetClock();
+}
+
+TaskQueueFactory* ExternalTimeController::GetTaskQueueFactory() {
+ return this;
+}
+
+void ExternalTimeController::AdvanceTime(TimeDelta duration) {
+ alarm_->Sleep(duration);
+}
+
+std::unique_ptr<rtc::Thread> ExternalTimeController::CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) {
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+rtc::Thread* ExternalTimeController::GetMainThread() {
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+std::unique_ptr<TaskQueueBase, TaskQueueDeleter>
+ExternalTimeController::CreateTaskQueue(
+ absl::string_view name,
+ TaskQueueFactory::Priority priority) const {
+ return std::unique_ptr<TaskQueueBase, TaskQueueDeleter>(
+ new TaskQueueWrapper(const_cast<ExternalTimeController*>(this),
+ impl_.CreateTaskQueue(name, priority)));
+}
+
+void ExternalTimeController::Run() {
+ rtc::ScopedYieldPolicy yield_policy(&impl_);
+ UpdateTime();
+ impl_.RunReadyRunners();
+ ScheduleNext();
+}
+
+void ExternalTimeController::UpdateTime() {
+ Timestamp now = alarm_->GetClock()->CurrentTime();
+ impl_.AdvanceTime(now);
+ global_clock_.SetTime(now);
+}
+
+void ExternalTimeController::ScheduleNext() {
+ RTC_DCHECK_EQ(impl_.CurrentTime(), alarm_->GetClock()->CurrentTime());
+ TimeDelta delay =
+ std::max(impl_.NextRunTime() - impl_.CurrentTime(), TimeDelta::Zero());
+ if (delay.IsFinite()) {
+ alarm_->ScheduleAlarmAt(alarm_->GetClock()->CurrentTime() + delay);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/external_time_controller.h b/third_party/libwebrtc/test/time_controller/external_time_controller.h
new file mode 100644
index 0000000000..a67f2557b4
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/external_time_controller.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_
+#define TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_
+
+#include <functional>
+#include <memory>
+
+#include "absl/strings/string_view.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/time_controller.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "system_wrappers/include/clock.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+// TimeController implementation built on an external controlled alarm.
+// This implementation is used to delegate scheduling and execution to an
+// external run loop.
+class ExternalTimeController : public TimeController, public TaskQueueFactory {
+ public:
+ explicit ExternalTimeController(ControlledAlarmClock* alarm);
+
+ // Implementation of TimeController.
+ Clock* GetClock() override;
+ TaskQueueFactory* GetTaskQueueFactory() override;
+ void AdvanceTime(TimeDelta duration) override;
+ std::unique_ptr<rtc::Thread> CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) override;
+ rtc::Thread* GetMainThread() override;
+
+ // Implementation of TaskQueueFactory.
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateTaskQueue(
+ absl::string_view name,
+ TaskQueueFactory::Priority priority) const override;
+
+ private:
+ class TaskQueueWrapper;
+
+ // Executes any tasks scheduled at or before the current time. May call
+ // `ScheduleNext` to schedule the next call to `Run`.
+ void Run();
+
+ void UpdateTime();
+ void ScheduleNext();
+
+ ControlledAlarmClock* alarm_;
+ sim_time_impl::SimulatedTimeControllerImpl impl_;
+ rtc::ScopedYieldPolicy yield_policy_;
+
+ // Overrides the global rtc::Clock to ensure that it reports the same times as
+ // the time controller.
+ rtc::ScopedBaseFakeClock global_clock_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_
diff --git a/third_party/libwebrtc/test/time_controller/external_time_controller_unittest.cc b/third_party/libwebrtc/test/time_controller/external_time_controller_unittest.cc
new file mode 100644
index 0000000000..13d63fe8ed
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/external_time_controller_unittest.cc
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/time_controller/external_time_controller.h"
+
+#include <atomic>
+#include <memory>
+#include <utility>
+
+#include "rtc_base/event.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+// NOTE: Since these tests rely on real time behavior, they will be flaky
+// if run on heavily loaded systems.
+namespace webrtc {
+namespace {
+using ::testing::AtLeast;
+using ::testing::Invoke;
+using ::testing::MockFunction;
+using ::testing::NiceMock;
+using ::testing::Return;
+constexpr Timestamp kStartTime = Timestamp::Seconds(1000);
+
+class FakeAlarm : public ControlledAlarmClock {
+ public:
+ explicit FakeAlarm(Timestamp start_time);
+
+ Clock* GetClock() override;
+ bool ScheduleAlarmAt(Timestamp deadline) override;
+ void SetCallback(std::function<void()> callback) override;
+ void Sleep(TimeDelta duration) override;
+
+ private:
+ SimulatedClock clock_;
+ Timestamp deadline_;
+ std::function<void()> callback_;
+};
+
+FakeAlarm::FakeAlarm(Timestamp start_time)
+ : clock_(start_time),
+ deadline_(Timestamp::PlusInfinity()),
+ callback_([] {}) {}
+
+Clock* FakeAlarm::GetClock() {
+ return &clock_;
+}
+
+bool FakeAlarm::ScheduleAlarmAt(Timestamp deadline) {
+ if (deadline < deadline_) {
+ deadline_ = deadline;
+ return true;
+ }
+ return false;
+}
+
+void FakeAlarm::SetCallback(std::function<void()> callback) {
+ callback_ = callback;
+}
+
+void FakeAlarm::Sleep(TimeDelta duration) {
+ Timestamp end_time = clock_.CurrentTime() + duration;
+
+ while (deadline_ <= end_time) {
+ clock_.AdvanceTime(deadline_ - clock_.CurrentTime());
+ deadline_ = Timestamp::PlusInfinity();
+ callback_();
+ }
+
+ clock_.AdvanceTime(end_time - clock_.CurrentTime());
+}
+
+} // namespace
+
+TEST(ExternalTimeControllerTest, TaskIsStoppedOnStop) {
+ const TimeDelta kShortInterval = TimeDelta::Millis(5);
+ const TimeDelta kLongInterval = TimeDelta::Millis(20);
+ const int kShortIntervalCount = 4;
+ const int kMargin = 1;
+ FakeAlarm alarm(kStartTime);
+ ExternalTimeController time_simulation(&alarm);
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+ std::atomic_int counter(0);
+ auto handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] {
+ if (++counter >= kShortIntervalCount)
+ return kLongInterval;
+ return kShortInterval;
+ });
+ // Sleep long enough to go through the initial phase.
+ time_simulation.AdvanceTime(kShortInterval * (kShortIntervalCount + kMargin));
+ EXPECT_EQ(counter.load(), kShortIntervalCount);
+
+ task_queue.PostTask(
+ [handle = std::move(handle)]() mutable { handle.Stop(); });
+
+ // Sleep long enough that the task would run at least once more if not
+ // stopped.
+ time_simulation.AdvanceTime(kLongInterval * 2);
+ EXPECT_EQ(counter.load(), kShortIntervalCount);
+}
+
+TEST(ExternalTimeControllerTest, TaskCanStopItself) {
+ std::atomic_int counter(0);
+ FakeAlarm alarm(kStartTime);
+ ExternalTimeController time_simulation(&alarm);
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+
+ RepeatingTaskHandle handle;
+ task_queue.PostTask([&] {
+ handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] {
+ ++counter;
+ handle.Stop();
+ return TimeDelta::Millis(2);
+ });
+ });
+ time_simulation.AdvanceTime(TimeDelta::Millis(10));
+ EXPECT_EQ(counter.load(), 1);
+}
+
+TEST(ExternalTimeControllerTest, YieldForTask) {
+ FakeAlarm alarm(kStartTime);
+ ExternalTimeController time_simulation(&alarm);
+
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+
+ rtc::Event event;
+ task_queue.PostTask([&] { event.Set(); });
+ EXPECT_TRUE(event.Wait(TimeDelta::Millis(200)));
+}
+
+TEST(ExternalTimeControllerTest, TasksYieldToEachOther) {
+ FakeAlarm alarm(kStartTime);
+ ExternalTimeController time_simulation(&alarm);
+
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+ rtc::TaskQueue other_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "OtherQueue", TaskQueueFactory::Priority::NORMAL));
+
+ task_queue.PostTask([&] {
+ rtc::Event event;
+ other_queue.PostTask([&] { event.Set(); });
+ EXPECT_TRUE(event.Wait(TimeDelta::Millis(200)));
+ });
+
+ time_simulation.AdvanceTime(TimeDelta::Millis(300));
+}
+
+TEST(ExternalTimeControllerTest, CurrentTaskQueue) {
+ FakeAlarm alarm(kStartTime);
+ ExternalTimeController time_simulation(&alarm);
+
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+
+ task_queue.PostTask([&] { EXPECT_TRUE(task_queue.IsCurrent()); });
+
+ time_simulation.AdvanceTime(TimeDelta::Millis(10));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/real_time_controller.cc b/third_party/libwebrtc/test/time_controller/real_time_controller.cc
new file mode 100644
index 0000000000..7cc750d6d4
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/real_time_controller.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/time_controller/real_time_controller.h"
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "rtc_base/null_socket_server.h"
+
+namespace webrtc {
+namespace {
+class MainThread : public rtc::Thread {
+ public:
+ MainThread()
+ : Thread(std::make_unique<rtc::NullSocketServer>(), false),
+ current_setter_(this) {
+ DoInit();
+ }
+ ~MainThread() {
+ Stop();
+ DoDestroy();
+ }
+
+ private:
+ CurrentThreadSetter current_setter_;
+};
+} // namespace
+RealTimeController::RealTimeController()
+ : task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ main_thread_(std::make_unique<MainThread>()) {
+ main_thread_->SetName("Main", this);
+}
+
+Clock* RealTimeController::GetClock() {
+ return Clock::GetRealTimeClock();
+}
+
+TaskQueueFactory* RealTimeController::GetTaskQueueFactory() {
+ return task_queue_factory_.get();
+}
+
+std::unique_ptr<rtc::Thread> RealTimeController::CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) {
+ if (!socket_server)
+ socket_server = std::make_unique<rtc::NullSocketServer>();
+ auto res = std::make_unique<rtc::Thread>(std::move(socket_server));
+ res->SetName(name, nullptr);
+ res->Start();
+ return res;
+}
+
+rtc::Thread* RealTimeController::GetMainThread() {
+ return main_thread_.get();
+}
+
+void RealTimeController::AdvanceTime(TimeDelta duration) {
+ main_thread_->ProcessMessages(duration.ms());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/real_time_controller.h b/third_party/libwebrtc/test/time_controller/real_time_controller.h
new file mode 100644
index 0000000000..5f02eaf85f
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/real_time_controller.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TIME_CONTROLLER_REAL_TIME_CONTROLLER_H_
+#define TEST_TIME_CONTROLLER_REAL_TIME_CONTROLLER_H_
+
+#include <functional>
+#include <memory>
+
+#include "api/task_queue/task_queue_factory.h"
+#include "api/test/time_controller.h"
+#include "api/units/time_delta.h"
+#include "system_wrappers/include/clock.h"
+
+namespace webrtc {
+class RealTimeController : public TimeController {
+ public:
+ RealTimeController();
+
+ Clock* GetClock() override;
+ TaskQueueFactory* GetTaskQueueFactory() override;
+ std::unique_ptr<rtc::Thread> CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) override;
+ rtc::Thread* GetMainThread() override;
+ void AdvanceTime(TimeDelta duration) override;
+
+ private:
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ const std::unique_ptr<rtc::Thread> main_thread_;
+};
+
+} // namespace webrtc
+
+#endif // TEST_TIME_CONTROLLER_REAL_TIME_CONTROLLER_H_
diff --git a/third_party/libwebrtc/test/time_controller/simulated_task_queue.cc b/third_party/libwebrtc/test/time_controller/simulated_task_queue.cc
new file mode 100644
index 0000000000..3c26721845
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_task_queue.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/time_controller/simulated_task_queue.h"
+
+#include <algorithm>
+#include <utility>
+
+namespace webrtc {
+
+SimulatedTaskQueue::SimulatedTaskQueue(
+ sim_time_impl::SimulatedTimeControllerImpl* handler,
+ absl::string_view name)
+ : handler_(handler), name_(new char[name.size()]) {
+ std::copy_n(name.begin(), name.size(), name_);
+}
+
+SimulatedTaskQueue::~SimulatedTaskQueue() {
+ handler_->Unregister(this);
+ delete[] name_;
+}
+
+void SimulatedTaskQueue::Delete() {
+ // Need to destroy the tasks outside of the lock because task destruction
+ // can lead to re-entry in SimulatedTaskQueue via custom destructors.
+ std::deque<absl::AnyInvocable<void() &&>> ready_tasks;
+ std::map<Timestamp, std::vector<absl::AnyInvocable<void() &&>>> delayed_tasks;
+ {
+ MutexLock lock(&lock_);
+ ready_tasks_.swap(ready_tasks);
+ delayed_tasks_.swap(delayed_tasks);
+ }
+ ready_tasks.clear();
+ delayed_tasks.clear();
+ delete this;
+}
+
+void SimulatedTaskQueue::RunReady(Timestamp at_time) {
+ MutexLock lock(&lock_);
+ for (auto it = delayed_tasks_.begin();
+ it != delayed_tasks_.end() && it->first <= at_time;
+ it = delayed_tasks_.erase(it)) {
+ for (auto& task : it->second) {
+ ready_tasks_.push_back(std::move(task));
+ }
+ }
+ CurrentTaskQueueSetter set_current(this);
+ while (!ready_tasks_.empty()) {
+ absl::AnyInvocable<void()&&> ready = std::move(ready_tasks_.front());
+ ready_tasks_.pop_front();
+ lock_.Unlock();
+ std::move(ready)();
+ ready = nullptr;
+ lock_.Lock();
+ }
+ if (!delayed_tasks_.empty()) {
+ next_run_time_ = delayed_tasks_.begin()->first;
+ } else {
+ next_run_time_ = Timestamp::PlusInfinity();
+ }
+}
+
+void SimulatedTaskQueue::PostTask(absl::AnyInvocable<void() &&> task) {
+ MutexLock lock(&lock_);
+ ready_tasks_.push_back(std::move(task));
+ next_run_time_ = Timestamp::MinusInfinity();
+}
+
+void SimulatedTaskQueue::PostDelayedTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) {
+ PostDelayedHighPrecisionTask(std::move(task), delay);
+}
+
+void SimulatedTaskQueue::PostDelayedHighPrecisionTask(
+ absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) {
+ MutexLock lock(&lock_);
+ Timestamp target_time = handler_->CurrentTime() + delay;
+ delayed_tasks_[target_time].push_back(std::move(task));
+ next_run_time_ = std::min(next_run_time_, target_time);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/simulated_task_queue.h b/third_party/libwebrtc/test/time_controller/simulated_task_queue.h
new file mode 100644
index 0000000000..3c55f15dde
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_task_queue.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TIME_CONTROLLER_SIMULATED_TASK_QUEUE_H_
+#define TEST_TIME_CONTROLLER_SIMULATED_TASK_QUEUE_H_
+
+#include <deque>
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "absl/functional/any_invocable.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+class SimulatedTaskQueue : public TaskQueueBase,
+ public sim_time_impl::SimulatedSequenceRunner {
+ public:
+ SimulatedTaskQueue(sim_time_impl::SimulatedTimeControllerImpl* handler,
+ absl::string_view name);
+
+ ~SimulatedTaskQueue();
+
+ void RunReady(Timestamp at_time) override;
+
+ Timestamp GetNextRunTime() const override {
+ MutexLock lock(&lock_);
+ return next_run_time_;
+ }
+ TaskQueueBase* GetAsTaskQueue() override { return this; }
+
+ // TaskQueueBase interface
+ void Delete() override;
+ void PostTask(absl::AnyInvocable<void() &&> task) override;
+ void PostDelayedTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override;
+ void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override;
+
+ private:
+ sim_time_impl::SimulatedTimeControllerImpl* const handler_;
+ // Using char* to be debugger friendly.
+ char* name_;
+
+ mutable Mutex lock_;
+
+ std::deque<absl::AnyInvocable<void() &&>> ready_tasks_ RTC_GUARDED_BY(lock_);
+ std::map<Timestamp, std::vector<absl::AnyInvocable<void() &&>>> delayed_tasks_
+ RTC_GUARDED_BY(lock_);
+
+ Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity();
+};
+
+} // namespace webrtc
+
+#endif // TEST_TIME_CONTROLLER_SIMULATED_TASK_QUEUE_H_
diff --git a/third_party/libwebrtc/test/time_controller/simulated_thread.cc b/third_party/libwebrtc/test/time_controller/simulated_thread.cc
new file mode 100644
index 0000000000..bdd1096327
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_thread.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/time_controller/simulated_thread.h"
+
+#include <algorithm>
+#include <utility>
+
+namespace webrtc {
+namespace {
+
+// A socket server that does nothing. It's different from NullSocketServer in
+// that it does allow sleep/wakeup. This avoids usage of an Event instance which
+// otherwise would cause issues with the simulated Yield behavior.
+class DummySocketServer : public rtc::SocketServer {
+ public:
+ rtc::Socket* CreateSocket(int family, int type) override {
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+ }
+ bool Wait(TimeDelta max_wait_duration, bool process_io) override {
+ RTC_CHECK(max_wait_duration.IsZero());
+ return true;
+ }
+ void WakeUp() override {}
+};
+
+} // namespace
+
+SimulatedThread::SimulatedThread(
+ sim_time_impl::SimulatedTimeControllerImpl* handler,
+ absl::string_view name,
+ std::unique_ptr<rtc::SocketServer> socket_server)
+ : rtc::Thread(socket_server ? std::move(socket_server)
+ : std::make_unique<DummySocketServer>()),
+ handler_(handler),
+ name_(new char[name.size()]) {
+ std::copy_n(name.begin(), name.size(), name_);
+}
+
+SimulatedThread::~SimulatedThread() {
+ handler_->Unregister(this);
+ delete[] name_;
+}
+
+void SimulatedThread::RunReady(Timestamp at_time) {
+ CurrentThreadSetter set_current(this);
+ ProcessMessages(0);
+ int delay_ms = GetDelay();
+ MutexLock lock(&lock_);
+ if (delay_ms == kForever) {
+ next_run_time_ = Timestamp::PlusInfinity();
+ } else {
+ next_run_time_ = at_time + TimeDelta::Millis(delay_ms);
+ }
+}
+
+void SimulatedThread::BlockingCall(rtc::FunctionView<void()> functor) {
+ if (IsQuitting())
+ return;
+
+ if (IsCurrent()) {
+ functor();
+ } else {
+ TaskQueueBase* yielding_from = TaskQueueBase::Current();
+ handler_->StartYield(yielding_from);
+ RunReady(Timestamp::MinusInfinity());
+ CurrentThreadSetter set_current(this);
+ functor();
+ handler_->StopYield(yielding_from);
+ }
+}
+
+void SimulatedThread::PostTask(absl::AnyInvocable<void() &&> task) {
+ rtc::Thread::PostTask(std::move(task));
+ MutexLock lock(&lock_);
+ next_run_time_ = Timestamp::MinusInfinity();
+}
+
+void SimulatedThread::PostDelayedTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) {
+ rtc::Thread::PostDelayedTask(std::move(task), delay);
+ MutexLock lock(&lock_);
+ next_run_time_ =
+ std::min(next_run_time_, Timestamp::Millis(rtc::TimeMillis()) + delay);
+}
+
+void SimulatedThread::PostDelayedHighPrecisionTask(
+ absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) {
+ rtc::Thread::PostDelayedHighPrecisionTask(std::move(task), delay);
+ MutexLock lock(&lock_);
+ next_run_time_ =
+ std::min(next_run_time_, Timestamp::Millis(rtc::TimeMillis()) + delay);
+}
+
+void SimulatedThread::Stop() {
+ Thread::Quit();
+}
+
+SimulatedMainThread::SimulatedMainThread(
+ sim_time_impl::SimulatedTimeControllerImpl* handler)
+ : SimulatedThread(handler, "main", nullptr), current_setter_(this) {}
+
+SimulatedMainThread::~SimulatedMainThread() {
+ // Removes pending tasks in case they keep shared pointer references to
+ // objects whose destructor expects to run before the Thread destructor.
+ Stop();
+ DoDestroy();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/simulated_thread.h b/third_party/libwebrtc/test/time_controller/simulated_thread.h
new file mode 100644
index 0000000000..e8e08c5000
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_thread.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TIME_CONTROLLER_SIMULATED_THREAD_H_
+#define TEST_TIME_CONTROLLER_SIMULATED_THREAD_H_
+
+#include <memory>
+
+#include "rtc_base/synchronization/mutex.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+
+class SimulatedThread : public rtc::Thread,
+ public sim_time_impl::SimulatedSequenceRunner {
+ public:
+ using CurrentThreadSetter = CurrentThreadSetter;
+ SimulatedThread(sim_time_impl::SimulatedTimeControllerImpl* handler,
+ absl::string_view name,
+ std::unique_ptr<rtc::SocketServer> socket_server);
+ ~SimulatedThread() override;
+
+ void RunReady(Timestamp at_time) override;
+
+ Timestamp GetNextRunTime() const override {
+ MutexLock lock(&lock_);
+ return next_run_time_;
+ }
+
+ TaskQueueBase* GetAsTaskQueue() override { return this; }
+
+ // Thread interface
+ void BlockingCall(rtc::FunctionView<void()> functor) override;
+ void PostTask(absl::AnyInvocable<void() &&> task) override;
+ void PostDelayedTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override;
+ void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> task,
+ TimeDelta delay) override;
+
+ void Stop() override;
+
+ private:
+ sim_time_impl::SimulatedTimeControllerImpl* const handler_;
+ // Using char* to be debugger friendly.
+ char* name_;
+ mutable Mutex lock_;
+ Timestamp next_run_time_ RTC_GUARDED_BY(lock_) = Timestamp::PlusInfinity();
+};
+
+class SimulatedMainThread : public SimulatedThread {
+ public:
+ explicit SimulatedMainThread(
+ sim_time_impl::SimulatedTimeControllerImpl* handler);
+ ~SimulatedMainThread();
+
+ private:
+ CurrentThreadSetter current_setter_;
+};
+} // namespace webrtc
+#endif // TEST_TIME_CONTROLLER_SIMULATED_THREAD_H_
diff --git a/third_party/libwebrtc/test/time_controller/simulated_time_controller.cc b/third_party/libwebrtc/test/time_controller/simulated_time_controller.cc
new file mode 100644
index 0000000000..1ed2b30dc8
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_time_controller.cc
@@ -0,0 +1,224 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/time_controller/simulated_time_controller.h"
+
+#include <algorithm>
+#include <deque>
+#include <list>
+#include <memory>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "test/time_controller/simulated_task_queue.h"
+#include "test/time_controller/simulated_thread.h"
+
+namespace webrtc {
+namespace {
+// Helper function to remove from a std container by value.
+template <class C>
+bool RemoveByValue(C* vec, typename C::value_type val) {
+ auto it = std::find(vec->begin(), vec->end(), val);
+ if (it == vec->end())
+ return false;
+ vec->erase(it);
+ return true;
+}
+} // namespace
+
+namespace sim_time_impl {
+
+SimulatedTimeControllerImpl::SimulatedTimeControllerImpl(Timestamp start_time)
+ : thread_id_(rtc::CurrentThreadId()), current_time_(start_time) {}
+
+SimulatedTimeControllerImpl::~SimulatedTimeControllerImpl() = default;
+
+std::unique_ptr<TaskQueueBase, TaskQueueDeleter>
+SimulatedTimeControllerImpl::CreateTaskQueue(
+ absl::string_view name,
+ TaskQueueFactory::Priority priority) const {
+ // TODO(srte): Remove the const cast when the interface is made mutable.
+ auto mutable_this = const_cast<SimulatedTimeControllerImpl*>(this);
+ auto task_queue = std::unique_ptr<SimulatedTaskQueue, TaskQueueDeleter>(
+ new SimulatedTaskQueue(mutable_this, name));
+ mutable_this->Register(task_queue.get());
+ return task_queue;
+}
+
+std::unique_ptr<rtc::Thread> SimulatedTimeControllerImpl::CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) {
+ auto thread =
+ std::make_unique<SimulatedThread>(this, name, std::move(socket_server));
+ Register(thread.get());
+ return thread;
+}
+
+void SimulatedTimeControllerImpl::YieldExecution() {
+ if (rtc::CurrentThreadId() == thread_id_) {
+ TaskQueueBase* yielding_from = TaskQueueBase::Current();
+ // Since we might continue execution on a process thread, we should reset
+ // the thread local task queue reference. This ensures that thread checkers
+ // won't think we are executing on the yielding task queue. It also ensure
+ // that TaskQueueBase::Current() won't return the yielding task queue.
+ TokenTaskQueue::CurrentTaskQueueSetter reset_queue(nullptr);
+ // When we yield, we don't want to risk executing further tasks on the
+ // currently executing task queue. If there's a ready task that also yields,
+ // it's added to this set as well and only tasks on the remaining task
+ // queues are executed.
+ auto inserted = yielded_.insert(yielding_from);
+ RTC_DCHECK(inserted.second);
+ RunReadyRunners();
+ yielded_.erase(inserted.first);
+ }
+}
+
+void SimulatedTimeControllerImpl::RunReadyRunners() {
+ // Using a dummy thread rather than nullptr to avoid implicit thread creation
+ // by Thread::Current().
+ SimulatedThread::CurrentThreadSetter set_current(dummy_thread_.get());
+ MutexLock lock(&lock_);
+ RTC_DCHECK_EQ(rtc::CurrentThreadId(), thread_id_);
+ Timestamp current_time = CurrentTime();
+ // Clearing `ready_runners_` in case this is a recursive call:
+  // RunReadyRunners -> Run -> Event::Wait -> Yield -> RunReadyRunners
+ ready_runners_.clear();
+
+  // We repeat until no runners are ready, so that we also handle tasks
+  // posted by the ready runners themselves.
+ while (true) {
+ for (auto* runner : runners_) {
+ if (yielded_.find(runner->GetAsTaskQueue()) == yielded_.end() &&
+ runner->GetNextRunTime() <= current_time) {
+ ready_runners_.push_back(runner);
+ }
+ }
+ if (ready_runners_.empty())
+ break;
+ while (!ready_runners_.empty()) {
+ auto* runner = ready_runners_.front();
+ ready_runners_.pop_front();
+ lock_.Unlock();
+ // Note that the RunReady function might indirectly cause a call to
+ // Unregister() which will grab `lock_` again to remove items from
+ // `ready_runners_`.
+ runner->RunReady(current_time);
+ lock_.Lock();
+ }
+ }
+}
+
+Timestamp SimulatedTimeControllerImpl::CurrentTime() const {
+ MutexLock lock(&time_lock_);
+ return current_time_;
+}
+
+Timestamp SimulatedTimeControllerImpl::NextRunTime() const {
+ Timestamp current_time = CurrentTime();
+ Timestamp next_time = Timestamp::PlusInfinity();
+ MutexLock lock(&lock_);
+ for (auto* runner : runners_) {
+ Timestamp next_run_time = runner->GetNextRunTime();
+ if (next_run_time <= current_time)
+ return current_time;
+ next_time = std::min(next_time, next_run_time);
+ }
+ return next_time;
+}
+
+void SimulatedTimeControllerImpl::AdvanceTime(Timestamp target_time) {
+ MutexLock time_lock(&time_lock_);
+ RTC_DCHECK_GE(target_time, current_time_);
+ current_time_ = target_time;
+}
+
+void SimulatedTimeControllerImpl::Register(SimulatedSequenceRunner* runner) {
+ MutexLock lock(&lock_);
+ runners_.push_back(runner);
+}
+
+void SimulatedTimeControllerImpl::Unregister(SimulatedSequenceRunner* runner) {
+ MutexLock lock(&lock_);
+ bool removed = RemoveByValue(&runners_, runner);
+ RTC_CHECK(removed);
+ RemoveByValue(&ready_runners_, runner);
+}
+
+void SimulatedTimeControllerImpl::StartYield(TaskQueueBase* yielding_from) {
+ auto inserted = yielded_.insert(yielding_from);
+ RTC_DCHECK(inserted.second);
+}
+
+void SimulatedTimeControllerImpl::StopYield(TaskQueueBase* yielding_from) {
+ yielded_.erase(yielding_from);
+}
+
+} // namespace sim_time_impl
+
+GlobalSimulatedTimeController::GlobalSimulatedTimeController(
+ Timestamp start_time)
+ : sim_clock_(start_time.us()), impl_(start_time), yield_policy_(&impl_) {
+ global_clock_.SetTime(start_time);
+ auto main_thread = std::make_unique<SimulatedMainThread>(&impl_);
+ impl_.Register(main_thread.get());
+ main_thread_ = std::move(main_thread);
+}
+
+GlobalSimulatedTimeController::~GlobalSimulatedTimeController() = default;
+
+Clock* GlobalSimulatedTimeController::GetClock() {
+ return &sim_clock_;
+}
+
+TaskQueueFactory* GlobalSimulatedTimeController::GetTaskQueueFactory() {
+ return &impl_;
+}
+
+std::unique_ptr<rtc::Thread> GlobalSimulatedTimeController::CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) {
+ return impl_.CreateThread(name, std::move(socket_server));
+}
+
+rtc::Thread* GlobalSimulatedTimeController::GetMainThread() {
+ return main_thread_.get();
+}
+
+void GlobalSimulatedTimeController::AdvanceTime(TimeDelta duration) {
+ rtc::ScopedYieldPolicy yield_policy(&impl_);
+ Timestamp current_time = impl_.CurrentTime();
+ Timestamp target_time = current_time + duration;
+ RTC_DCHECK_EQ(current_time.us(), rtc::TimeMicros());
+ while (current_time < target_time) {
+ impl_.RunReadyRunners();
+ Timestamp next_time = std::min(impl_.NextRunTime(), target_time);
+ impl_.AdvanceTime(next_time);
+ auto delta = next_time - current_time;
+ current_time = next_time;
+ sim_clock_.AdvanceTimeMicroseconds(delta.us());
+ global_clock_.AdvanceTime(delta);
+ }
+ // After time has been simulated up until `target_time` we also need to run
+ // tasks meant to be executed at `target_time`.
+ impl_.RunReadyRunners();
+}
+
+void GlobalSimulatedTimeController::Register(
+ sim_time_impl::SimulatedSequenceRunner* runner) {
+ impl_.Register(runner);
+}
+
+void GlobalSimulatedTimeController::Unregister(
+ sim_time_impl::SimulatedSequenceRunner* runner) {
+ impl_.Unregister(runner);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/simulated_time_controller.h b/third_party/libwebrtc/test/time_controller/simulated_time_controller.h
new file mode 100644
index 0000000000..121b9171e8
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_time_controller.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_TIME_CONTROLLER_SIMULATED_TIME_CONTROLLER_H_
+#define TEST_TIME_CONTROLLER_SIMULATED_TIME_CONTROLLER_H_
+
+#include <list>
+#include <memory>
+#include <unordered_set>
+#include <utility>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/sequence_checker.h"
+#include "api/test/time_controller.h"
+#include "api/units/timestamp.h"
+#include "rtc_base/fake_clock.h"
+#include "rtc_base/platform_thread_types.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/synchronization/yield_policy.h"
+
+namespace webrtc {
+namespace sim_time_impl {
+class SimulatedSequenceRunner {
+ public:
+ virtual ~SimulatedSequenceRunner() = default;
+ // Provides next run time.
+ virtual Timestamp GetNextRunTime() const = 0;
+ // Runs all ready tasks and modules and updates next run time.
+ virtual void RunReady(Timestamp at_time) = 0;
+
+ // All implementations also implements TaskQueueBase in some form, but if we'd
+ // inherit from it in this interface we'd run into issues with double
+ // inheritance. Therefore we simply allow the implementations to provide a
+  // cast pointer to themselves.
+ virtual TaskQueueBase* GetAsTaskQueue() = 0;
+};
+
+class SimulatedTimeControllerImpl : public TaskQueueFactory,
+ public rtc::YieldInterface {
+ public:
+ explicit SimulatedTimeControllerImpl(Timestamp start_time);
+ ~SimulatedTimeControllerImpl() override;
+
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> CreateTaskQueue(
+ absl::string_view name,
+ Priority priority) const RTC_LOCKS_EXCLUDED(time_lock_) override;
+
+ // Implements the YieldInterface by running ready tasks on all task queues,
+ // except that if this method is called from a task, the task queue running
+ // that task is skipped.
+ void YieldExecution() RTC_LOCKS_EXCLUDED(time_lock_, lock_) override;
+
+ // Create thread using provided `socket_server`.
+ std::unique_ptr<rtc::Thread> CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server)
+ RTC_LOCKS_EXCLUDED(time_lock_, lock_);
+
+ // Runs all runners in `runners_` that has tasks or modules ready for
+ // execution.
+ void RunReadyRunners() RTC_LOCKS_EXCLUDED(time_lock_, lock_);
+ // Return `current_time_`.
+ Timestamp CurrentTime() const RTC_LOCKS_EXCLUDED(time_lock_);
+ // Return min of runner->GetNextRunTime() for runner in `runners_`.
+ Timestamp NextRunTime() const RTC_LOCKS_EXCLUDED(lock_);
+ // Set `current_time_` to `target_time`.
+ void AdvanceTime(Timestamp target_time) RTC_LOCKS_EXCLUDED(time_lock_);
+ // Adds `runner` to `runners_`.
+ void Register(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
+ // Removes `runner` from `runners_`.
+ void Unregister(SimulatedSequenceRunner* runner) RTC_LOCKS_EXCLUDED(lock_);
+
+ // Indicates that `yielding_from` is not ready to run.
+ void StartYield(TaskQueueBase* yielding_from);
+ // Indicates that processing can be continued on `yielding_from`.
+ void StopYield(TaskQueueBase* yielding_from);
+
+ private:
+ const rtc::PlatformThreadId thread_id_;
+ const std::unique_ptr<rtc::Thread> dummy_thread_ = rtc::Thread::Create();
+ mutable Mutex time_lock_;
+ Timestamp current_time_ RTC_GUARDED_BY(time_lock_);
+ mutable Mutex lock_;
+ std::vector<SimulatedSequenceRunner*> runners_ RTC_GUARDED_BY(lock_);
+ // Used in RunReadyRunners() to keep track of ready runners that are to be
+  // processed in a round robin fashion. The reason it's a member is so that
+  // runners can be removed from here by Unregister().
+ std::list<SimulatedSequenceRunner*> ready_runners_ RTC_GUARDED_BY(lock_);
+
+ // Runners on which YieldExecution has been called.
+ std::unordered_set<TaskQueueBase*> yielded_;
+};
+} // namespace sim_time_impl
+
+// Used to satisfy sequence checkers for non task queue sequences.
+class TokenTaskQueue : public TaskQueueBase {
+ public:
+ // Promoted to public
+ using CurrentTaskQueueSetter = TaskQueueBase::CurrentTaskQueueSetter;
+
+ void Delete() override { RTC_DCHECK_NOTREACHED(); }
+ void PostTask(absl::AnyInvocable<void() &&> /*task*/) override {
+ RTC_DCHECK_NOTREACHED();
+ }
+ void PostDelayedTask(absl::AnyInvocable<void() &&> /*task*/,
+ TimeDelta /*delay*/) override {
+ RTC_DCHECK_NOTREACHED();
+ }
+ void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> /*task*/,
+ TimeDelta /*delay*/) override {
+ RTC_DCHECK_NOTREACHED();
+ }
+};
+
+// TimeController implementation using completely simulated time. Task queues
+// and process threads created by this controller will run delayed activities
+// when AdvanceTime() is called. Overrides the global clock backing
+// rtc::TimeMillis() and rtc::TimeMicros(). Note that this is not thread safe
+// since it modifies global state.
+class GlobalSimulatedTimeController : public TimeController {
+ public:
+ explicit GlobalSimulatedTimeController(Timestamp start_time);
+ ~GlobalSimulatedTimeController() override;
+
+ Clock* GetClock() override;
+ TaskQueueFactory* GetTaskQueueFactory() override;
+ std::unique_ptr<rtc::Thread> CreateThread(
+ const std::string& name,
+ std::unique_ptr<rtc::SocketServer> socket_server) override;
+ rtc::Thread* GetMainThread() override;
+
+ void AdvanceTime(TimeDelta duration) override;
+
+ // Makes the simulated time controller aware of a custom
+ // SimulatedSequenceRunner.
+ // TODO(bugs.webrtc.org/11581): remove method once the ModuleRtpRtcpImpl2 unit
+ // test stops using it.
+ void Register(sim_time_impl::SimulatedSequenceRunner* runner);
+ // Removes a previously installed custom SimulatedSequenceRunner from the
+ // simulated time controller.
+ // TODO(bugs.webrtc.org/11581): remove method once the ModuleRtpRtcpImpl2 unit
+ // test stops using it.
+ void Unregister(sim_time_impl::SimulatedSequenceRunner* runner);
+
+ private:
+ rtc::ScopedBaseFakeClock global_clock_;
+ // Provides simulated CurrentNtpInMilliseconds()
+ SimulatedClock sim_clock_;
+ sim_time_impl::SimulatedTimeControllerImpl impl_;
+ rtc::ScopedYieldPolicy yield_policy_;
+ std::unique_ptr<rtc::Thread> main_thread_;
+};
+} // namespace webrtc
+
+#endif // TEST_TIME_CONTROLLER_SIMULATED_TIME_CONTROLLER_H_
diff --git a/third_party/libwebrtc/test/time_controller/simulated_time_controller_unittest.cc b/third_party/libwebrtc/test/time_controller/simulated_time_controller_unittest.cc
new file mode 100644
index 0000000000..1ee592cc7c
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/simulated_time_controller_unittest.cc
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/time_controller/simulated_time_controller.h"
+
+#include <atomic>
+#include <memory>
+
+#include "rtc_base/event.h"
+#include "rtc_base/task_queue.h"
+#include "rtc_base/task_queue_for_test.h"
+#include "rtc_base/task_utils/repeating_task.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+// NOTE: Since these tests rely on real time behavior, they will be flaky
+// if run on heavily loaded systems.
+namespace webrtc {
+namespace {
+using ::testing::AtLeast;
+using ::testing::Invoke;
+using ::testing::MockFunction;
+using ::testing::NiceMock;
+using ::testing::Return;
+constexpr Timestamp kStartTime = Timestamp::Seconds(1000);
+} // namespace
+
+TEST(SimulatedTimeControllerTest, TaskIsStoppedOnStop) {
+ const TimeDelta kShortInterval = TimeDelta::Millis(5);
+ const TimeDelta kLongInterval = TimeDelta::Millis(20);
+ const int kShortIntervalCount = 4;
+ const int kMargin = 1;
+ GlobalSimulatedTimeController time_simulation(kStartTime);
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+ std::atomic_int counter(0);
+ auto handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] {
+ if (++counter >= kShortIntervalCount)
+ return kLongInterval;
+ return kShortInterval;
+ });
+ // Sleep long enough to go through the initial phase.
+ time_simulation.AdvanceTime(kShortInterval * (kShortIntervalCount + kMargin));
+ EXPECT_EQ(counter.load(), kShortIntervalCount);
+
+ task_queue.PostTask(
+ [handle = std::move(handle)]() mutable { handle.Stop(); });
+
+ // Sleep long enough that the task would run at least once more if not
+ // stopped.
+ time_simulation.AdvanceTime(kLongInterval * 2);
+ EXPECT_EQ(counter.load(), kShortIntervalCount);
+}
+
+TEST(SimulatedTimeControllerTest, TaskCanStopItself) {
+ std::atomic_int counter(0);
+ GlobalSimulatedTimeController time_simulation(kStartTime);
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+
+ RepeatingTaskHandle handle;
+ task_queue.PostTask([&] {
+ handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] {
+ ++counter;
+ handle.Stop();
+ return TimeDelta::Millis(2);
+ });
+ });
+ time_simulation.AdvanceTime(TimeDelta::Millis(10));
+ EXPECT_EQ(counter.load(), 1);
+}
+
+TEST(SimulatedTimeControllerTest, Example) {
+ class ObjectOnTaskQueue {
+ public:
+ void DoPeriodicTask() {}
+ TimeDelta TimeUntilNextRun() { return TimeDelta::Millis(100); }
+ void StartPeriodicTask(RepeatingTaskHandle* handle,
+ rtc::TaskQueue* task_queue) {
+ *handle = RepeatingTaskHandle::Start(task_queue->Get(), [this] {
+ DoPeriodicTask();
+ return TimeUntilNextRun();
+ });
+ }
+ };
+ GlobalSimulatedTimeController time_simulation(kStartTime);
+ rtc::TaskQueue task_queue(
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL));
+ auto object = std::make_unique<ObjectOnTaskQueue>();
+ // Create and start the periodic task.
+ RepeatingTaskHandle handle;
+ object->StartPeriodicTask(&handle, &task_queue);
+ // Restart the task
+ task_queue.PostTask(
+ [handle = std::move(handle)]() mutable { handle.Stop(); });
+ object->StartPeriodicTask(&handle, &task_queue);
+ task_queue.PostTask(
+ [handle = std::move(handle)]() mutable { handle.Stop(); });
+
+ task_queue.PostTask([object = std::move(object)] {});
+}
+
+TEST(SimulatedTimeControllerTest, DelayTaskRunOnTime) {
+ GlobalSimulatedTimeController time_simulation(kStartTime);
+ std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue =
+ time_simulation.GetTaskQueueFactory()->CreateTaskQueue(
+ "TestQueue", TaskQueueFactory::Priority::NORMAL);
+
+ bool delay_task_executed = false;
+ task_queue->PostDelayedTask([&] { delay_task_executed = true; },
+ TimeDelta::Millis(10));
+
+ time_simulation.AdvanceTime(TimeDelta::Millis(10));
+ EXPECT_TRUE(delay_task_executed);
+}
+
+TEST(SimulatedTimeControllerTest, ThreadYeildsOnSynchronousCall) {
+ GlobalSimulatedTimeController sim(kStartTime);
+ auto main_thread = sim.GetMainThread();
+ auto t2 = sim.CreateThread("thread", nullptr);
+ bool task_has_run = false;
+ // Posting a task to the main thread, this should not run until AdvanceTime is
+ // called.
+ main_thread->PostTask([&] { task_has_run = true; });
+ SendTask(t2.get(), [] {
+ rtc::Event yield_event;
+    // Wait() triggers YieldExecution() which runs message processing on
+ // all threads that are not in the yielded set.
+
+ yield_event.Wait(TimeDelta::Zero());
+ });
+ // Since we are doing an invoke from the main thread, we don't expect the main
+ // thread message loop to be processed.
+ EXPECT_FALSE(task_has_run);
+ sim.AdvanceTime(TimeDelta::Seconds(1));
+ ASSERT_TRUE(task_has_run);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/time_controller/time_controller_conformance_test.cc b/third_party/libwebrtc/test/time_controller/time_controller_conformance_test.cc
new file mode 100644
index 0000000000..300dd9175c
--- /dev/null
+++ b/third_party/libwebrtc/test/time_controller/time_controller_conformance_test.cc
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "api/test/time_controller.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/time_controller/real_time_controller.h"
+#include "test/time_controller/simulated_time_controller.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::TestParamInfo;
+using ::testing::TestWithParam;
+using ::testing::Values;
+
+enum class TimeMode { kRealTime, kSimulated };
+
+std::unique_ptr<TimeController> CreateTimeController(TimeMode mode) {
+ switch (mode) {
+ case TimeMode::kRealTime:
+ return std::make_unique<RealTimeController>();
+ case TimeMode::kSimulated:
+ // Using an offset of 100000 to get nice fixed width and readable
+ // timestamps in typical test scenarios.
+ constexpr Timestamp kSimulatedStartTime = Timestamp::Seconds(100000);
+ return std::make_unique<GlobalSimulatedTimeController>(
+ kSimulatedStartTime);
+ }
+}
+
+std::string ParamsToString(const TestParamInfo<webrtc::TimeMode>& param) {
+ switch (param.param) {
+ case webrtc::TimeMode::kRealTime:
+ return "RealTime";
+ case webrtc::TimeMode::kSimulated:
+ return "SimulatedTime";
+ default:
+ RTC_DCHECK_NOTREACHED() << "Time mode not supported";
+ }
+}
+
+// Keeps order of executions. May be called from different threads.
+class ExecutionOrderKeeper {
+ public:
+ void Executed(int execution_id) {
+ MutexLock lock(&mutex_);
+ order_.push_back(execution_id);
+ }
+
+ std::vector<int> order() const {
+ MutexLock lock(&mutex_);
+ return order_;
+ }
+
+ private:
+ mutable Mutex mutex_;
+ std::vector<int> order_ RTC_GUARDED_BY(mutex_);
+};
+
+// Tests conformance between real time and simulated time time controller.
+class SimulatedRealTimeControllerConformanceTest
+ : public TestWithParam<webrtc::TimeMode> {};
+
+TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostOrderTest) {
+ std::unique_ptr<TimeController> time_controller =
+ CreateTimeController(GetParam());
+ std::unique_ptr<rtc::Thread> thread = time_controller->CreateThread("thread");
+
+ // Tasks on thread have to be executed in order in which they were
+ // posted.
+ ExecutionOrderKeeper execution_order;
+ thread->PostTask([&]() { execution_order.Executed(1); });
+ thread->PostTask([&]() { execution_order.Executed(2); });
+ time_controller->AdvanceTime(TimeDelta::Millis(100));
+ EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2}));
+ // Destroy `thread` before `execution_order` to be sure `execution_order`
+ // is not accessed on the posted task after it is destroyed.
+ thread = nullptr;
+}
+
+TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostDelayedOrderTest) {
+ std::unique_ptr<TimeController> time_controller =
+ CreateTimeController(GetParam());
+ std::unique_ptr<rtc::Thread> thread = time_controller->CreateThread("thread");
+
+ ExecutionOrderKeeper execution_order;
+ thread->PostDelayedTask([&]() { execution_order.Executed(2); },
+ TimeDelta::Millis(500));
+ thread->PostTask([&]() { execution_order.Executed(1); });
+ time_controller->AdvanceTime(TimeDelta::Millis(600));
+ EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2}));
+ // Destroy `thread` before `execution_order` to be sure `execution_order`
+ // is not accessed on the posted task after it is destroyed.
+ thread = nullptr;
+}
+
+TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostInvokeOrderTest) {
+ std::unique_ptr<TimeController> time_controller =
+ CreateTimeController(GetParam());
+ std::unique_ptr<rtc::Thread> thread = time_controller->CreateThread("thread");
+
+ // Tasks on thread have to be executed in order in which they were
+ // posted/invoked.
+ ExecutionOrderKeeper execution_order;
+ thread->PostTask([&]() { execution_order.Executed(1); });
+ thread->BlockingCall([&]() { execution_order.Executed(2); });
+ time_controller->AdvanceTime(TimeDelta::Millis(100));
+ EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2}));
+ // Destroy `thread` before `execution_order` to be sure `execution_order`
+ // is not accessed on the posted task after it is destroyed.
+ thread = nullptr;
+}
+
+TEST_P(SimulatedRealTimeControllerConformanceTest,
+ ThreadPostInvokeFromThreadOrderTest) {
+ std::unique_ptr<TimeController> time_controller =
+ CreateTimeController(GetParam());
+ std::unique_ptr<rtc::Thread> thread = time_controller->CreateThread("thread");
+
+ // If task is invoked from thread X on thread X it has to be executed
+ // immediately.
+ ExecutionOrderKeeper execution_order;
+ thread->PostTask([&]() {
+ thread->PostTask([&]() { execution_order.Executed(2); });
+ thread->BlockingCall([&]() { execution_order.Executed(1); });
+ });
+ time_controller->AdvanceTime(TimeDelta::Millis(100));
+ EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2}));
+ // Destroy `thread` before `execution_order` to be sure `execution_order`
+ // is not accessed on the posted task after it is destroyed.
+ thread = nullptr;
+}
+
+TEST_P(SimulatedRealTimeControllerConformanceTest,
+ TaskQueuePostEventWaitOrderTest) {
+ std::unique_ptr<TimeController> time_controller =
+ CreateTimeController(GetParam());
+ auto task_queue = time_controller->GetTaskQueueFactory()->CreateTaskQueue(
+ "task_queue", webrtc::TaskQueueFactory::Priority::NORMAL);
+
+ // Tasks on thread have to be executed in order in which they were
+ // posted/invoked.
+ ExecutionOrderKeeper execution_order;
+ rtc::Event event;
+ task_queue->PostTask([&]() { execution_order.Executed(1); });
+ task_queue->PostTask([&]() {
+ execution_order.Executed(2);
+ event.Set();
+ });
+ EXPECT_TRUE(event.Wait(/*give_up_after=*/TimeDelta::Millis(100)));
+ time_controller->AdvanceTime(TimeDelta::Millis(100));
+ EXPECT_THAT(execution_order.order(), ElementsAreArray({1, 2}));
+ // Destroy `task_queue` before `execution_order` to be sure `execution_order`
+ // is not accessed on the posted task after it is destroyed.
+ task_queue = nullptr;
+}
+
+INSTANTIATE_TEST_SUITE_P(ConformanceTest,
+ SimulatedRealTimeControllerConformanceTest,
+ Values(TimeMode::kRealTime, TimeMode::kSimulated),
+ ParamsToString);
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/vcm_capturer.cc b/third_party/libwebrtc/test/vcm_capturer.cc
new file mode 100644
index 0000000000..e02fc722b2
--- /dev/null
+++ b/third_party/libwebrtc/test/vcm_capturer.cc
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/vcm_capturer.h"
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "modules/video_capture/video_capture_factory.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace test {
+
+VcmCapturer::VcmCapturer() : vcm_(nullptr) {}
+
+bool VcmCapturer::Init(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index) {
+ std::unique_ptr<VideoCaptureModule::DeviceInfo> device_info(
+ VideoCaptureFactory::CreateDeviceInfo());
+
+ char device_name[256];
+ char unique_name[256];
+ if (device_info->GetDeviceName(static_cast<uint32_t>(capture_device_index),
+ device_name, sizeof(device_name), unique_name,
+ sizeof(unique_name)) != 0) {
+ Destroy();
+ return false;
+ }
+
+ vcm_ = webrtc::VideoCaptureFactory::Create(unique_name);
+ if (!vcm_) {
+ return false;
+ }
+ vcm_->RegisterCaptureDataCallback(this);
+
+ device_info->GetCapability(vcm_->CurrentDeviceName(), 0, capability_);
+
+ capability_.width = static_cast<int32_t>(width);
+ capability_.height = static_cast<int32_t>(height);
+ capability_.maxFPS = static_cast<int32_t>(target_fps);
+ capability_.videoType = VideoType::kI420;
+
+ if (vcm_->StartCapture(capability_) != 0) {
+ Destroy();
+ return false;
+ }
+
+ RTC_CHECK(vcm_->CaptureStarted());
+
+ return true;
+}
+
+VcmCapturer* VcmCapturer::Create(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index) {
+ std::unique_ptr<VcmCapturer> vcm_capturer(new VcmCapturer());
+ if (!vcm_capturer->Init(width, height, target_fps, capture_device_index)) {
+ RTC_LOG(LS_WARNING) << "Failed to create VcmCapturer(w = " << width
+ << ", h = " << height << ", fps = " << target_fps
+ << ")";
+ return nullptr;
+ }
+ return vcm_capturer.release();
+}
+
+void VcmCapturer::Destroy() {
+ if (!vcm_)
+ return;
+
+ vcm_->StopCapture();
+ vcm_->DeRegisterCaptureDataCallback(this);
+ // Release reference to VCM.
+ vcm_ = nullptr;
+}
+
+VcmCapturer::~VcmCapturer() {
+ Destroy();
+}
+
+void VcmCapturer::OnFrame(const VideoFrame& frame) {
+ TestVideoCapturer::OnFrame(frame);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/vcm_capturer.h b/third_party/libwebrtc/test/vcm_capturer.h
new file mode 100644
index 0000000000..5418dc9596
--- /dev/null
+++ b/third_party/libwebrtc/test/vcm_capturer.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_VCM_CAPTURER_H_
+#define TEST_VCM_CAPTURER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/scoped_refptr.h"
+#include "modules/video_capture/video_capture.h"
+#include "test/test_video_capturer.h"
+
+namespace webrtc {
+namespace test {
+
+class VcmCapturer : public TestVideoCapturer,
+ public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ static VcmCapturer* Create(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index);
+ virtual ~VcmCapturer();
+
+ void OnFrame(const VideoFrame& frame) override;
+
+ private:
+ VcmCapturer();
+ bool Init(size_t width,
+ size_t height,
+ size_t target_fps,
+ size_t capture_device_index);
+ void Destroy();
+
+ rtc::scoped_refptr<VideoCaptureModule> vcm_;
+ VideoCaptureCapability capability_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VCM_CAPTURER_H_
diff --git a/third_party/libwebrtc/test/video_codec_settings.h b/third_party/libwebrtc/test/video_codec_settings.h
new file mode 100644
index 0000000000..5ef4ed3e4a
--- /dev/null
+++ b/third_party/libwebrtc/test/video_codec_settings.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_VIDEO_CODEC_SETTINGS_H_
+#define TEST_VIDEO_CODEC_SETTINGS_H_
+
+#include "api/video_codecs/video_encoder.h"
+
+namespace webrtc {
+namespace test {
+
+const uint16_t kTestWidth = 352;
+const uint16_t kTestHeight = 288;
+const uint32_t kTestFrameRate = 30;
+const unsigned int kTestMinBitrateKbps = 30;
+const unsigned int kTestStartBitrateKbps = 300;
+const uint8_t kTestPayloadType = 100;
+const int64_t kTestTimingFramesDelayMs = 200;
+const uint16_t kTestOutlierFrameSizePercent = 250;
+
+static void CodecSettings(VideoCodecType codec_type, VideoCodec* settings) {
+ *settings = {};
+
+ settings->width = kTestWidth;
+ settings->height = kTestHeight;
+
+ settings->startBitrate = kTestStartBitrateKbps;
+ settings->maxBitrate = 0;
+ settings->minBitrate = kTestMinBitrateKbps;
+
+ settings->maxFramerate = kTestFrameRate;
+
+ settings->active = true;
+
+ settings->qpMax = 56; // See webrtcvideoengine.h.
+ settings->numberOfSimulcastStreams = 0;
+
+ settings->timing_frame_thresholds = {
+ kTestTimingFramesDelayMs,
+ kTestOutlierFrameSizePercent,
+ };
+
+ settings->codecType = codec_type;
+ switch (codec_type) {
+ case kVideoCodecVP8:
+ *(settings->VP8()) = VideoEncoder::GetDefaultVp8Settings();
+ return;
+ case kVideoCodecVP9:
+ *(settings->VP9()) = VideoEncoder::GetDefaultVp9Settings();
+ return;
+ case kVideoCodecH264:
+ // TODO(brandtr): Set `qpMax` here, when the OpenH264 wrapper supports it.
+ *(settings->H264()) = VideoEncoder::GetDefaultH264Settings();
+ return;
+ default:
+ return;
+ }
+}
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VIDEO_CODEC_SETTINGS_H_
diff --git a/third_party/libwebrtc/test/video_decoder_proxy_factory.h b/third_party/libwebrtc/test/video_decoder_proxy_factory.h
new file mode 100644
index 0000000000..6fd3805cd6
--- /dev/null
+++ b/third_party/libwebrtc/test/video_decoder_proxy_factory.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_VIDEO_DECODER_PROXY_FACTORY_H_
+#define TEST_VIDEO_DECODER_PROXY_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+namespace test {
+
+// A decoder factory with a single underlying VideoDecoder object, intended for
+// test purposes. Each call to CreateVideoDecoder returns a proxy for the same
+// decoder, typically an instance of FakeDecoder or MockDecoder.
+class VideoDecoderProxyFactory final : public VideoDecoderFactory {
+ public:
+ explicit VideoDecoderProxyFactory(VideoDecoder* decoder)
+ : decoder_(decoder) {}
+
+ // Unused by tests.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ RTC_DCHECK_NOTREACHED();
+ return {};
+ }
+
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override {
+ return std::make_unique<DecoderProxy>(decoder_);
+ }
+
+ private:
+ // Wrapper class, since CreateVideoDecoder needs to surrender
+ // ownership to the object it returns.
+ class DecoderProxy final : public VideoDecoder {
+ public:
+ explicit DecoderProxy(VideoDecoder* decoder) : decoder_(decoder) {}
+
+ private:
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override {
+ return decoder_->Decode(input_image, missing_frames, render_time_ms);
+ }
+ bool Configure(const Settings& settings) override {
+ return decoder_->Configure(settings);
+ }
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override {
+ return decoder_->RegisterDecodeCompleteCallback(callback);
+ }
+ int32_t Release() override { return decoder_->Release(); }
+ DecoderInfo GetDecoderInfo() const override {
+ return decoder_->GetDecoderInfo();
+ }
+ const char* ImplementationName() const override {
+ return decoder_->ImplementationName();
+ }
+
+ VideoDecoder* const decoder_;
+ };
+
+ VideoDecoder* const decoder_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VIDEO_DECODER_PROXY_FACTORY_H_
diff --git a/third_party/libwebrtc/test/video_encoder_nullable_proxy_factory.h b/third_party/libwebrtc/test/video_encoder_nullable_proxy_factory.h
new file mode 100644
index 0000000000..da81fff343
--- /dev/null
+++ b/third_party/libwebrtc/test/video_encoder_nullable_proxy_factory.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_VIDEO_ENCODER_NULLABLE_PROXY_FACTORY_H_
+#define TEST_VIDEO_ENCODER_NULLABLE_PROXY_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "test/video_encoder_proxy_factory.h"
+
+namespace webrtc {
+namespace test {
+
+class VideoEncoderNullableProxyFactory final : public VideoEncoderProxyFactory {
+ public:
+ explicit VideoEncoderNullableProxyFactory(
+ VideoEncoder* encoder,
+ EncoderSelectorInterface* encoder_selector)
+ : VideoEncoderProxyFactory(encoder, encoder_selector) {}
+
+ ~VideoEncoderNullableProxyFactory() override = default;
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override {
+ if (!encoder_) {
+ return nullptr;
+ }
+ return VideoEncoderProxyFactory::CreateVideoEncoder(format);
+ }
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VIDEO_ENCODER_NULLABLE_PROXY_FACTORY_H_
diff --git a/third_party/libwebrtc/test/video_encoder_proxy_factory.h b/third_party/libwebrtc/test/video_encoder_proxy_factory.h
new file mode 100644
index 0000000000..cc485e993a
--- /dev/null
+++ b/third_party/libwebrtc/test/video_encoder_proxy_factory.h
@@ -0,0 +1,158 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_VIDEO_ENCODER_PROXY_FACTORY_H_
+#define TEST_VIDEO_ENCODER_PROXY_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+const VideoEncoder::Capabilities kCapabilities(false);
+}
+
+// An encoder factory with a single underlying VideoEncoder object,
+// intended for test purposes. Each call to CreateVideoEncoder returns
+// a proxy for the same encoder, typically an instance of FakeEncoder.
+class VideoEncoderProxyFactory : public VideoEncoderFactory {
+ public:
+ explicit VideoEncoderProxyFactory(VideoEncoder* encoder)
+ : VideoEncoderProxyFactory(encoder, nullptr) {}
+
+ explicit VideoEncoderProxyFactory(VideoEncoder* encoder,
+ EncoderSelectorInterface* encoder_selector)
+ : encoder_(encoder),
+ encoder_selector_(encoder_selector),
+ num_simultaneous_encoder_instances_(0),
+ max_num_simultaneous_encoder_instances_(0) {
+ }
+
+ // Unused by tests.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ RTC_DCHECK_NOTREACHED();
+ return {};
+ }
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override {
+ ++num_simultaneous_encoder_instances_;
+ max_num_simultaneous_encoder_instances_ =
+ std::max(max_num_simultaneous_encoder_instances_,
+ num_simultaneous_encoder_instances_);
+ return std::make_unique<EncoderProxy>(encoder_, this);
+ }
+
+ std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector()
+ const override {
+ if (encoder_selector_ != nullptr) {
+ return std::make_unique<EncoderSelectorProxy>(encoder_selector_);
+ }
+
+ return nullptr;
+ }
+
+ int GetMaxNumberOfSimultaneousEncoderInstances() {
+ return max_num_simultaneous_encoder_instances_;
+ }
+
+ protected:
+ void OnDestroyVideoEncoder() {
+ RTC_CHECK_GT(num_simultaneous_encoder_instances_, 0);
+ --num_simultaneous_encoder_instances_;
+ }
+
+ // Wrapper class, since CreateVideoEncoder needs to surrender
+ // ownership to the object it returns.
+ class EncoderProxy final : public VideoEncoder {
+ public:
+ explicit EncoderProxy(VideoEncoder* encoder,
+ VideoEncoderProxyFactory* encoder_factory)
+ : encoder_(encoder), encoder_factory_(encoder_factory) {}
+ ~EncoderProxy() { encoder_factory_->OnDestroyVideoEncoder(); }
+
+ private:
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override {
+ encoder_->SetFecControllerOverride(fec_controller_override);
+ }
+
+ int32_t Encode(const VideoFrame& input_image,
+ const std::vector<VideoFrameType>* frame_types) override {
+ return encoder_->Encode(input_image, frame_types);
+ }
+
+ int32_t InitEncode(const VideoCodec* config,
+ const Settings& settings) override {
+ return encoder_->InitEncode(config, settings);
+ }
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override {
+ return encoder_->RegisterEncodeCompleteCallback(callback);
+ }
+
+ int32_t Release() override { return encoder_->Release(); }
+
+ void SetRates(const RateControlParameters& parameters) override {
+ encoder_->SetRates(parameters);
+ }
+
+ VideoEncoder::EncoderInfo GetEncoderInfo() const override {
+ return encoder_->GetEncoderInfo();
+ }
+
+ VideoEncoder* const encoder_;
+ VideoEncoderProxyFactory* const encoder_factory_;
+ };
+
+ class EncoderSelectorProxy final : public EncoderSelectorInterface {
+ public:
+ explicit EncoderSelectorProxy(EncoderSelectorInterface* encoder_selector)
+ : encoder_selector_(encoder_selector) {}
+
+ void OnCurrentEncoder(const SdpVideoFormat& format) override {
+ encoder_selector_->OnCurrentEncoder(format);
+ }
+
+ absl::optional<SdpVideoFormat> OnAvailableBitrate(
+ const DataRate& rate) override {
+ return encoder_selector_->OnAvailableBitrate(rate);
+ }
+
+ absl::optional<SdpVideoFormat> OnResolutionChange(
+ const RenderResolution& resolution) override {
+ return encoder_selector_->OnResolutionChange(resolution);
+ }
+
+ absl::optional<SdpVideoFormat> OnEncoderBroken() override {
+ return encoder_selector_->OnEncoderBroken();
+ }
+
+ private:
+ EncoderSelectorInterface* const encoder_selector_;
+ };
+
+ VideoEncoder* const encoder_;
+ EncoderSelectorInterface* const encoder_selector_;
+
+ int num_simultaneous_encoder_instances_;
+ int max_num_simultaneous_encoder_instances_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VIDEO_ENCODER_PROXY_FACTORY_H_
diff --git a/third_party/libwebrtc/test/video_renderer.cc b/third_party/libwebrtc/test/video_renderer.cc
new file mode 100644
index 0000000000..75ab72a883
--- /dev/null
+++ b/third_party/libwebrtc/test/video_renderer.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/video_renderer.h"
+
+// TODO(pbos): Android renderer
+
+namespace webrtc {
+namespace test {
+
+class NullRenderer : public VideoRenderer {
+ void OnFrame(const VideoFrame& video_frame) override {}
+};
+
+VideoRenderer* VideoRenderer::Create(const char* window_title,
+ size_t width,
+ size_t height) {
+ VideoRenderer* renderer = CreatePlatformRenderer(window_title, width, height);
+ if (renderer != nullptr)
+ return renderer;
+
+ return new NullRenderer();
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/video_renderer.h b/third_party/libwebrtc/test/video_renderer.h
new file mode 100644
index 0000000000..9e580f6f59
--- /dev/null
+++ b/third_party/libwebrtc/test/video_renderer.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_VIDEO_RENDERER_H_
+#define TEST_VIDEO_RENDERER_H_
+
+#include <stddef.h>
+
+#include "api/video/video_sink_interface.h"
+
+namespace webrtc {
+class VideoFrame;
+
+namespace test {
+class VideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ // Creates a platform-specific renderer if possible, or a null implementation
+ // if failing.
+ static VideoRenderer* Create(const char* window_title,
+ size_t width,
+ size_t height);
+  // Creates a platform-specific renderer if possible; returns NULL if a
+  // platform renderer could not be created. This occurs, for instance, when
+  // running without an X environment on Linux.
+  // Note that Create() above falls back to a NullRenderer when this returns
+  // NULL, so callers of Create() always receive a non-null renderer.
+ static VideoRenderer* CreatePlatformRenderer(const char* window_title,
+ size_t width,
+ size_t height);
+ virtual ~VideoRenderer() {}
+
+ protected:
+ VideoRenderer() {}
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_VIDEO_RENDERER_H_
diff --git a/third_party/libwebrtc/test/win/d3d_renderer.cc b/third_party/libwebrtc/test/win/d3d_renderer.cc
new file mode 100644
index 0000000000..5ba90fad78
--- /dev/null
+++ b/third_party/libwebrtc/test/win/d3d_renderer.cc
@@ -0,0 +1,206 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "test/win/d3d_renderer.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace test {
+
+#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ | D3DFVF_TEX1)
+
+struct D3dCustomVertex {
+ float x, y, z;
+ float u, v;
+};
+
+const char kD3DClassName[] = "d3d_renderer";
+
+VideoRenderer* VideoRenderer::CreatePlatformRenderer(const char* window_title,
+ size_t width,
+ size_t height) {
+ return D3dRenderer::Create(window_title, width, height);
+}
+
+D3dRenderer::D3dRenderer(size_t width, size_t height)
+ : width_(width),
+ height_(height),
+ hwnd_(NULL),
+ d3d_(nullptr),
+ d3d_device_(nullptr),
+ texture_(nullptr),
+ vertex_buffer_(nullptr) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+}
+
+D3dRenderer::~D3dRenderer() {
+ Destroy();
+}
+
+LRESULT WINAPI D3dRenderer::WindowProc(HWND hwnd,
+ UINT msg,
+ WPARAM wparam,
+ LPARAM lparam) {
+ if (msg == WM_DESTROY || (msg == WM_CHAR && wparam == VK_RETURN)) {
+ PostQuitMessage(0);
+ return 0;
+ }
+
+ return DefWindowProcA(hwnd, msg, wparam, lparam);
+}
+
+void D3dRenderer::Destroy() {
+ texture_ = nullptr;
+ vertex_buffer_ = nullptr;
+ d3d_device_ = nullptr;
+ d3d_ = nullptr;
+
+ if (hwnd_ != NULL) {
+ DestroyWindow(hwnd_);
+ RTC_DCHECK(!IsWindow(hwnd_));
+ hwnd_ = NULL;
+ }
+}
+
+bool D3dRenderer::Init(const char* window_title) {
+ hwnd_ = CreateWindowA(kD3DClassName, window_title, WS_OVERLAPPEDWINDOW, 0, 0,
+ static_cast<int>(width_), static_cast<int>(height_),
+ NULL, NULL, NULL, NULL);
+
+ if (hwnd_ == NULL) {
+ Destroy();
+ return false;
+ }
+
+ d3d_ = Direct3DCreate9(D3D_SDK_VERSION);
+ if (d3d_ == nullptr) {
+ Destroy();
+ return false;
+ }
+
+ D3DPRESENT_PARAMETERS d3d_params = {};
+
+ d3d_params.Windowed = TRUE;
+ d3d_params.SwapEffect = D3DSWAPEFFECT_COPY;
+
+ IDirect3DDevice9* d3d_device;
+ if (d3d_->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hwnd_,
+ D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3d_params,
+ &d3d_device) != D3D_OK) {
+ Destroy();
+ return false;
+ }
+ d3d_device_ = d3d_device;
+ d3d_device->Release();
+
+ IDirect3DVertexBuffer9* vertex_buffer;
+ const int kRectVertices = 4;
+ if (d3d_device_->CreateVertexBuffer(kRectVertices * sizeof(D3dCustomVertex),
+ 0, D3DFVF_CUSTOMVERTEX, D3DPOOL_MANAGED,
+ &vertex_buffer, NULL) != D3D_OK) {
+ Destroy();
+ return false;
+ }
+ vertex_buffer_ = vertex_buffer;
+ vertex_buffer->Release();
+
+ d3d_device_->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
+ d3d_device_->SetRenderState(D3DRS_LIGHTING, FALSE);
+ Resize(width_, height_);
+
+ ShowWindow(hwnd_, SW_SHOWNOACTIVATE);
+ d3d_device_->Present(NULL, NULL, NULL, NULL);
+
+ return true;
+}
+
+D3dRenderer* D3dRenderer::Create(const char* window_title,
+ size_t width,
+ size_t height) {
+ static ATOM wc_atom = 0;
+ if (wc_atom == 0) {
+ WNDCLASSA wc = {};
+
+ wc.style = CS_HREDRAW | CS_VREDRAW;
+ wc.lpfnWndProc = WindowProc;
+ wc.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wc.hbrBackground = reinterpret_cast<HBRUSH>(COLOR_WINDOW);
+ wc.lpszClassName = kD3DClassName;
+
+ wc_atom = RegisterClassA(&wc);
+ if (wc_atom == 0)
+ return nullptr;
+ }
+
+ D3dRenderer* d3d_renderer = new D3dRenderer(width, height);
+ if (!d3d_renderer->Init(window_title)) {
+ delete d3d_renderer;
+ return nullptr;
+ }
+
+ return d3d_renderer;
+}
+
+void D3dRenderer::Resize(size_t width, size_t height) {
+ width_ = width;
+ height_ = height;
+ IDirect3DTexture9* texture;
+
+ d3d_device_->CreateTexture(static_cast<UINT>(width_),
+ static_cast<UINT>(height_), 1, 0, D3DFMT_A8R8G8B8,
+ D3DPOOL_MANAGED, &texture, NULL);
+ texture_ = texture;
+ texture->Release();
+
+ // Vertices for the video frame to be rendered to.
+ static const D3dCustomVertex rect[] = {
+ {-1.0f, -1.0f, 0.0f, 0.0f, 1.0f},
+ {-1.0f, 1.0f, 0.0f, 0.0f, 0.0f},
+ {1.0f, -1.0f, 0.0f, 1.0f, 1.0f},
+ {1.0f, 1.0f, 0.0f, 1.0f, 0.0f},
+ };
+
+ void* buf_data;
+ if (vertex_buffer_->Lock(0, 0, &buf_data, 0) != D3D_OK)
+ return;
+
+ memcpy(buf_data, &rect, sizeof(rect));
+ vertex_buffer_->Unlock();
+}
+
+void D3dRenderer::OnFrame(const webrtc::VideoFrame& frame) {
+ if (static_cast<size_t>(frame.width()) != width_ ||
+ static_cast<size_t>(frame.height()) != height_) {
+ Resize(static_cast<size_t>(frame.width()),
+ static_cast<size_t>(frame.height()));
+ }
+
+ D3DLOCKED_RECT lock_rect;
+ if (texture_->LockRect(0, &lock_rect, NULL, 0) != D3D_OK)
+ return;
+
+ ConvertFromI420(frame, VideoType::kARGB, 0,
+ static_cast<uint8_t*>(lock_rect.pBits));
+ texture_->UnlockRect(0);
+
+ d3d_device_->BeginScene();
+ d3d_device_->SetFVF(D3DFVF_CUSTOMVERTEX);
+ d3d_device_->SetStreamSource(0, vertex_buffer_.get(), 0,
+ sizeof(D3dCustomVertex));
+ d3d_device_->SetTexture(0, texture_.get());
+ d3d_device_->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+ d3d_device_->EndScene();
+
+ d3d_device_->Present(NULL, NULL, NULL, NULL);
+}
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/test/win/d3d_renderer.h b/third_party/libwebrtc/test/win/d3d_renderer.h
new file mode 100644
index 0000000000..9e5e23c328
--- /dev/null
+++ b/third_party/libwebrtc/test/win/d3d_renderer.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef TEST_WIN_D3D_RENDERER_H_
+#define TEST_WIN_D3D_RENDERER_H_
+
+#include <Windows.h>
+#include <d3d9.h>
+#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
+
+#include "api/scoped_refptr.h"
+#include "test/video_renderer.h"
+
+namespace webrtc {
+namespace test {
+
+class D3dRenderer : public VideoRenderer {
+ public:
+ static D3dRenderer* Create(const char* window_title,
+ size_t width,
+ size_t height);
+ virtual ~D3dRenderer();
+
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+ D3dRenderer(size_t width, size_t height);
+
+ static LRESULT WINAPI WindowProc(HWND hwnd,
+ UINT msg,
+ WPARAM wparam,
+ LPARAM lparam);
+ bool Init(const char* window_title);
+ void Resize(size_t width, size_t height);
+ void Destroy();
+
+ size_t width_, height_;
+
+ HWND hwnd_;
+ rtc::scoped_refptr<IDirect3D9> d3d_;
+ rtc::scoped_refptr<IDirect3DDevice9> d3d_device_;
+
+ rtc::scoped_refptr<IDirect3DTexture9> texture_;
+ rtc::scoped_refptr<IDirect3DVertexBuffer9> vertex_buffer_;
+};
+} // namespace test
+} // namespace webrtc
+
+#endif // TEST_WIN_D3D_RENDERER_H_