Diffstat (limited to 'third_party/libwebrtc/modules')
-rw-r--r--  third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/BUILD.gn | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc | 26
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_coding_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_coding_module_typedefs_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_encoder_cng_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc | 3
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc | 3
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_config_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/default_neteq_factory_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/g711_c_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/g722_c_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/ilbc_c_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/ilbc_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/isac_vad_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.cc | 30
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.h | 16
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/decision_logic_unittest.cc | 62
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h | 32
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h | 47
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.cc | 185
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.h | 8
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc | 99
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/neteq_unittest.cc | 20
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/packet_arrival_history.h | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.cc | 200
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.h | 61
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc | 468
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_decoding_test.cc | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq/test/result_sink.cc | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/neteq_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/pcm16b_c_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/red_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/webrtc_cng_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_device/BUILD.gn | 2
-rw-r--r--  third_party/libwebrtc/modules/audio_device/audio_device_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_device/include/audio_device.h | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_device/include/fake_audio_device.h | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_mixer/audio_frame_manipulator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/BUILD.gn | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn/moz.build | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/aec3_common_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/aec3_fft_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/aec3_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/fft_data_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/matched_filter_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/render_buffer_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec3/vector_math_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump/BUILD.gn | 1
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_impl.h | 3
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump/capture_stream_info.h | 3
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/aecm/aecm_core_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc/agc_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/common_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/cpu_features_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/fixed_digital_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/gain_applier_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/gain_map_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_auto_correlation_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_common_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_layers_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_lp_residual_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_pitch_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_ring_buffer_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_sequence_buffer_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn/moz.build | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/api_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/apm_logging_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_buffer_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_frame_view_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_processing_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h | 1
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc | 2
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_processing_statistics_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc | 6
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/gain_controller2_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/high_pass_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/include/audio_processing.h | 6
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/ns/ns_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/optionally_built_submodule_creators_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/rms_level_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/test/aec_dump_based_simulator.h | 3
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/test/debug_dump_replayer.h | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/test/protobuf_utils.h | 4
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_api_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_impl_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/transient/voice_probability_delay_unit_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/utility/cascaded_biquad_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/utility/legacy_delay_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/utility/pffft_wrapper_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/audio_processing/vad/vad_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/congestion_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/estimators_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/link_capacity_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc | 184
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h | 19
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc | 407
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc | 30
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h | 4
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bwe_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/rtp/control_handler_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/congestion_controller/rtp/transport_feedback_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc | 1
-rw-r--r--  third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc | 28
-rw-r--r--  third_party/libwebrtc/modules/desktop_capture/mac/desktop_frame_provider.h | 2
-rw-r--r--  third_party/libwebrtc/modules/desktop_capture/mac/screen_capturer_mac.mm | 4
-rw-r--r--  third_party/libwebrtc/modules/desktop_capture/win/dxgi_duplicator_controller.h | 2
-rw-r--r--  third_party/libwebrtc/modules/module_api_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/module_api_public_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/module_fec_api_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/pacing/interval_budget_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/pacing/pacing_controller.cc | 2
-rw-r--r--  third_party/libwebrtc/modules/pacing/pacing_controller.h | 5
-rw-r--r--  third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc | 144
-rw-r--r--  third_party/libwebrtc/modules/pacing/pacing_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc | 34
-rw-r--r--  third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h | 31
-rw-r--r--  third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc | 44
-rw-r--r--  third_party/libwebrtc/modules/portal/pipewire_utils.h | 75
-rw-r--r--  third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h | 2
-rw-r--r--  third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc | 18
-rw-r--r--  third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn | 11
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/leb128_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_format_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc | 18
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc | 121
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_format.cc | 9
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc | 3
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.cc | 350
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.h | 66
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc | 525
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc | 4
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc | 3
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.h | 3
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc | 15
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc | 50
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h | 3
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc | 30
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc | 26
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h | 5
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc | 23
-rw-r--r--  third_party/libwebrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc | 3
-rw-r--r--  third_party/libwebrtc/modules/third_party/fft/fft_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/third_party/g711/g711_3p_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/third_party/g722/g722_3p_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/utility/utility_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc | 16
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc | 8
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h | 2
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc | 18
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc | 46
-rw-r--r--  third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc | 2
-rw-r--r--  third_party/libwebrtc/modules/video_capture/video_capture.h | 2
-rw-r--r--  third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/BUILD.gn | 67
-rw-r--r--  third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codec_globals_headers_gn/moz.build | 7
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc | 193
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h | 75
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc | 127
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc | 278
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h | 62
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc | 148
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc | 888
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc | 437
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h | 45
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc | 205
-rw-r--r--  third_party/libwebrtc/modules/video_coding/encoded_frame_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/frame_helpers_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/generic_decoder.cc | 13
-rw-r--r--  third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h | 16
-rw-r--r--  third_party/libwebrtc/modules/video_coding/nack_requester_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/packet_buffer_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/svc/scalable_video_controller_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/decode_time_percentile_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/inter_frame_delay_variation_calculator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/jitter_estimator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/rtt_filter_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/timestamp_extrapolator_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/timing/timing_module_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/video_coding_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/video_coding_utility_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_libvpx_interface_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_vp8_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_vp8_scalability_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_vp8_temporal_layers_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_vp9_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/webrtc_vp9_helpers_gn/moz.build | 5
243 files changed, 2600 insertions(+), 4488 deletions(-)
diff --git a/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build b/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build
index 347559a342..dfff987043 100644
--- a/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build
+++ b/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/BUILD.gn b/third_party/libwebrtc/modules/audio_coding/BUILD.gn
index 3e4d7e0c25..ddd1fd2656 100644
--- a/third_party/libwebrtc/modules/audio_coding/BUILD.gn
+++ b/third_party/libwebrtc/modules/audio_coding/BUILD.gn
@@ -618,7 +618,6 @@ rtc_library("audio_network_adaptor") {
"../../common_audio",
"../../logging:rtc_event_audio",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:logging",
"../../rtc_base:protobuf_utils",
"../../rtc_base:safe_conversions",
@@ -957,7 +956,6 @@ rtc_library("audio_coding_modules_tests_shared") {
"../../api/audio_codecs:builtin_audio_encoder_factory",
"../../api/neteq:neteq_api",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:ssl",
"../../rtc_base:stringutils",
"../../system_wrappers",
@@ -1644,6 +1642,7 @@ if (rtc_include_tests) {
"neteq/mock/mock_expand.h",
"neteq/mock/mock_histogram.h",
"neteq/mock/mock_neteq_controller.h",
+ "neteq/mock/mock_packet_arrival_history.h",
"neteq/mock/mock_packet_buffer.h",
"neteq/mock/mock_red_payload_splitter.h",
"neteq/mock/mock_statistics_calculator.h",
@@ -1717,7 +1716,6 @@ if (rtc_include_tests) {
"../../logging:rtc_event_audio",
"../../modules/rtp_rtcp:rtp_rtcp_format",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:macromagic",
"../../rtc_base:platform_thread",
"../../rtc_base:refcount",
diff --git a/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
index 210244154a..2d9ea91106 100644
--- a/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/acm2/audio_coding_module_unittest.cc
@@ -707,7 +707,7 @@ class AcmSenderBitExactnessNewApi : public AcmSenderBitExactnessOldApi {};
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80));
- Run(/*audio_checksum_ref=*/"69118ed438ac76252d023e0463819471",
+ Run(/*audio_checksum_ref=*/"3e43fd5d3c73a59e8118e68fbfafe2c7",
/*payload_checksum_ref=*/"c1edd36339ce0326cc4550041ad719a0",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -715,7 +715,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 1, 108, 160, 160));
- Run(/*audio_checksum_ref=*/"f95c87bdd33f631bcf80f4b19445bbd2",
+ Run(/*audio_checksum_ref=*/"608750138315cbab33d76d38e8367807",
/*payload_checksum_ref=*/"ad786526383178b08d80d6eee06e9bad",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -723,7 +723,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 1, 109, 320, 320));
- Run(/*audio_checksum_ref=*/"c50244419c5c3a2f04cc69a022c266a2",
+ Run(/*audio_checksum_ref=*/"02e9927ef5e4d2cd792a5df0bdee5e19",
/*payload_checksum_ref=*/"5ef82ea885e922263606c6fdbc49f651",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -731,7 +731,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 2, 111, 80, 80));
- Run(/*audio_checksum_ref=*/"4fccf4cc96f1e8e8de4b9fadf62ded9e",
+ Run(/*audio_checksum_ref=*/"4ff38de045b19f64de9c7e229ba36317",
/*payload_checksum_ref=*/"62ce5adb0d4965d0a52ec98ae7f98974",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput);
@@ -739,7 +739,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 2, 112, 160, 160));
- Run(/*audio_checksum_ref=*/"e15e388d9d4af8c02a59fe1552fedee3",
+ Run(/*audio_checksum_ref=*/"1ee35394cfca78ad6d55468441af36fa",
/*payload_checksum_ref=*/"41ca8edac4b8c71cd54fd9f25ec14870",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput);
@@ -747,7 +747,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_32000khz_10ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 2, 113, 320, 320));
- Run(/*audio_checksum_ref=*/"b240520c0d05003fde7a174ae5957286",
+ Run(/*audio_checksum_ref=*/"19cae34730a0f6a17cf4e76bf21b69d6",
/*payload_checksum_ref=*/"50e58502fb04421bf5b857dda4c96879",
/*expected_packets=*/100,
/*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput);
@@ -763,7 +763,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcmu_20ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcma_20ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 1, 8, 160, 160));
- Run(/*audio_checksum_ref=*/"47eb60e855eb12d1b0e6da9c975754a4",
+ Run(/*audio_checksum_ref=*/"ae259cab624095270b7369e53a7b53a3",
/*payload_checksum_ref=*/"6ad745e55aa48981bfc790d0eeef2dd1",
/*expected_packets=*/50,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -779,7 +779,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcmu_stereo_20ms) {
TEST_F(AcmSenderBitExactnessOldApi, Pcma_stereo_20ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 2, 118, 160, 160));
- Run(/*audio_checksum_ref=*/"a84d75e098d87ab6b260687eb4b612a2",
+ Run(/*audio_checksum_ref=*/"f2e81d2531a805c40e61da5106b50006",
/*payload_checksum_ref=*/"92b282c83efd20e7eeef52ba40842cf7",
/*expected_packets=*/50,
/*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput);
@@ -789,7 +789,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcma_stereo_20ms) {
defined(WEBRTC_ARCH_X86_64)
TEST_F(AcmSenderBitExactnessOldApi, Ilbc_30ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("ILBC", 8000, 1, 102, 240, 240));
- Run(/*audio_checksum_ref=*/"b14dba0de36efa5ec88a32c0b320b70f",
+ Run(/*audio_checksum_ref=*/"a739434bec8a754e9356ce2115603ce5",
/*payload_checksum_ref=*/"cfae2e9f6aba96e145f2bcdd5050ce78",
/*expected_packets=*/33,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -799,7 +799,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Ilbc_30ms) {
#if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64)
TEST_F(AcmSenderBitExactnessOldApi, G722_20ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 1, 9, 320, 160));
- Run(/*audio_checksum_ref=*/"f5264affff25cf2cbd2e1e8a5217f9a3",
+ Run(/*audio_checksum_ref=*/"b875d9a3e41f5470857bdff02e3b368f",
/*payload_checksum_ref=*/"fc68a87e1380614e658087cb35d5ca10",
/*expected_packets=*/50,
/*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput);
@@ -809,7 +809,7 @@ TEST_F(AcmSenderBitExactnessOldApi, G722_20ms) {
#if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64)
TEST_F(AcmSenderBitExactnessOldApi, G722_stereo_20ms) {
ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 2, 119, 320, 160));
- Run(/*audio_checksum_ref=*/"be0b8528ff9db3a2219f55ddd36faf7f",
+ Run(/*audio_checksum_ref=*/"02c427d73363b2f37853a0dd17fe1aba",
/*payload_checksum_ref=*/"66516152eeaa1e650ad94ff85f668dac",
/*expected_packets=*/50,
/*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput);
@@ -897,8 +897,8 @@ TEST_F(AcmSenderBitExactnessNewApi, OpusFromFormat_stereo_20ms_voip) {
ASSERT_NO_FATAL_FAILURE(SetUpTestExternalEncoder(
AudioEncoderOpus::MakeAudioEncoder(*config, 120), 120));
const std::string audio_maybe_sse =
- "1010e60ad34cee73c939edaf563d0593"
- "|c05b4523d4c3fad2bab96d2a56baa2d0";
+ "cb644fc17d9666a0f5986eef24818159"
+ "|4a74024473c7c729543c2790829b1e42";
const std::string payload_maybe_sse =
"ea48d94e43217793af9b7e15ece94e54"
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_coding_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_coding_gn/moz.build
index 4dad1217d0..88fa77a0e2 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_coding_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_coding_gn/moz.build
@@ -203,7 +203,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -213,10 +212,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_coding_module_typedefs_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_coding_module_typedefs_gn/moz.build
index 704026c845..851dd7b58e 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_coding_module_typedefs_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_coding_module_typedefs_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build
index bbb1557baa..e509916cfd 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_encoder_cng_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_encoder_cng_gn/moz.build
index 75153f3221..7829419065 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_encoder_cng_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_encoder_cng_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
index 42dd8a8786..793c73a380 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager.cc
@@ -24,18 +24,16 @@
#include "modules/audio_coding/audio_network_adaptor/frame_length_controller.h"
#include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h"
#include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
#if WEBRTC_ENABLE_PROTOBUF
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
#else
#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
+
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
index 3e6ecf6def..f399511757 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc
@@ -17,17 +17,14 @@
#include "modules/audio_coding/audio_network_adaptor/mock/mock_controller.h"
#include "modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h"
#include "rtc_base/fake_clock.h"
-#include "rtc_base/ignore_wundef.h"
#include "test/gtest.h"
#if WEBRTC_ENABLE_PROTOBUF
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
#else
#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc
index 2616706ee5..5ffbee219c 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc
@@ -14,18 +14,15 @@
#include "absl/types/optional.h"
#include "rtc_base/checks.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/system/file_wrapper.h"
#if WEBRTC_ENABLE_PROTOBUF
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump.pb.h"
#else
#include "modules/audio_coding/audio_network_adaptor/debug_dump.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h
index 8fdf2f7728..fd3a64dbb1 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h
@@ -15,16 +15,14 @@
#include "modules/audio_coding/audio_network_adaptor/controller.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/system/file_wrapper.h"
+
#if WEBRTC_ENABLE_PROTOBUF
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h"
#else
#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_config_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_config_gn/moz.build
index b9d3c55453..de87e8b033 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_config_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_config_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_gn/moz.build
index 7d446965f1..8a371a9aaf 100644
--- a/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/audio_network_adaptor_gn/moz.build
@@ -209,7 +209,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -219,10 +218,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/default_neteq_factory_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/default_neteq_factory_gn/moz.build
index aea0a80ed4..d7928549d7 100644
--- a/third_party/libwebrtc/modules/audio_coding/default_neteq_factory_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/default_neteq_factory_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/g711_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g711_c_gn/moz.build
index 575478702e..bedb8fc477 100644
--- a/third_party/libwebrtc/modules/audio_coding/g711_c_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/g711_c_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build
index fa25fde0bd..103d89c6d8 100644
--- a/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build
@@ -196,7 +196,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -206,10 +205,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/g722_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g722_c_gn/moz.build
index 4821c2bd82..48137ada85 100644
--- a/third_party/libwebrtc/modules/audio_coding/g722_c_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/g722_c_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build
index 0a56f32af0..81eb870466 100644
--- a/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build
@@ -196,7 +196,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -206,10 +205,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/ilbc_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/ilbc_c_gn/moz.build
index 43d69c7662..d3aa4e0018 100644
--- a/third_party/libwebrtc/modules/audio_coding/ilbc_c_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/ilbc_c_gn/moz.build
@@ -267,7 +267,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -277,10 +276,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/ilbc_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/ilbc_gn/moz.build
index c4b3b4cd13..9a397a1fdc 100644
--- a/third_party/libwebrtc/modules/audio_coding/ilbc_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/ilbc_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn/moz.build
index 4f4a5c0e7e..fdfc4fc855 100644
--- a/third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/isac_bwinfo_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/isac_vad_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/isac_vad_gn/moz.build
index a5cc52279a..1b599c5e51 100644
--- a/third_party/libwebrtc/modules/audio_coding/isac_vad_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/isac_vad_gn/moz.build
@@ -187,7 +187,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -197,10 +196,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build
index 78b7338ddd..b884cb8d99 100644
--- a/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.cc b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.cc
index fd4f2f5a20..6648fd8709 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -95,10 +95,14 @@ DecisionLogic::DecisionLogic(NetEqController::Config config)
DecisionLogic::DecisionLogic(
NetEqController::Config config,
std::unique_ptr<DelayManager> delay_manager,
- std::unique_ptr<BufferLevelFilter> buffer_level_filter)
+ std::unique_ptr<BufferLevelFilter> buffer_level_filter,
+ std::unique_ptr<PacketArrivalHistory> packet_arrival_history)
: delay_manager_(std::move(delay_manager)),
buffer_level_filter_(std::move(buffer_level_filter)),
- packet_arrival_history_(config_.packet_history_size_ms),
+ packet_arrival_history_(packet_arrival_history
+ ? std::move(packet_arrival_history)
+ : std::make_unique<PacketArrivalHistory>(
+ config_.packet_history_size_ms)),
tick_timer_(config.tick_timer),
disallow_time_stretching_(!config.allow_time_stretching),
timescale_countdown_(
@@ -115,7 +119,7 @@ void DecisionLogic::SoftReset() {
time_stretched_cn_samples_ = 0;
delay_manager_->Reset();
buffer_level_filter_->Reset();
- packet_arrival_history_.Reset();
+ packet_arrival_history_->Reset();
}
void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) {
@@ -124,7 +128,7 @@ void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) {
fs_hz == 48000);
sample_rate_khz_ = fs_hz / 1000;
output_size_samples_ = output_size_samples;
- packet_arrival_history_.set_sample_rate(fs_hz);
+ packet_arrival_history_->set_sample_rate(fs_hz);
}
NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status,
@@ -218,15 +222,15 @@ absl::optional<int> DecisionLogic::PacketArrived(
delay_manager_->SetPacketAudioLength(packet_length_samples_ * 1000 / fs_hz);
}
int64_t time_now_ms = tick_timer_->ticks() * tick_timer_->ms_per_tick();
- packet_arrival_history_.Insert(info.main_timestamp, time_now_ms);
- if (packet_arrival_history_.size() < 2) {
+ packet_arrival_history_->Insert(info.main_timestamp, time_now_ms);
+ if (packet_arrival_history_->size() < 2) {
// No meaningful delay estimate unless at least 2 packets have arrived.
return absl::nullopt;
}
int arrival_delay_ms =
- packet_arrival_history_.GetDelayMs(info.main_timestamp, time_now_ms);
+ packet_arrival_history_->GetDelayMs(info.main_timestamp, time_now_ms);
bool reordered =
- !packet_arrival_history_.IsNewestRtpTimestamp(info.main_timestamp);
+ !packet_arrival_history_->IsNewestRtpTimestamp(info.main_timestamp);
delay_manager_->Update(arrival_delay_ms, reordered);
return arrival_delay_ms;
}
@@ -306,10 +310,10 @@ NetEq::Operation DecisionLogic::ExpectedPacketAvailable(
!status.play_dtmf) {
if (config_.enable_stable_delay_mode) {
const int playout_delay_ms = GetPlayoutDelayMs(status);
- const int low_limit = TargetLevelMs();
- const int high_limit = low_limit +
- packet_arrival_history_.GetMaxDelayMs() +
- kDelayAdjustmentGranularityMs;
+ const int64_t low_limit = TargetLevelMs();
+ const int64_t high_limit = low_limit +
+ packet_arrival_history_->GetMaxDelayMs() +
+ kDelayAdjustmentGranularityMs;
if (playout_delay_ms >= high_limit * 4) {
return NetEq::Operation::kFastAccelerate;
}
@@ -460,7 +464,7 @@ int DecisionLogic::GetPlayoutDelayMs(
NetEqController::NetEqStatus status) const {
uint32_t playout_timestamp =
status.target_timestamp - status.sync_buffer_samples;
- return packet_arrival_history_.GetDelayMs(
+ return packet_arrival_history_->GetDelayMs(
playout_timestamp, tick_timer_->ticks() * tick_timer_->ms_per_tick());
}
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.h b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.h
index d96fbecd6a..a6b02c69cd 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.h
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic.h
@@ -27,9 +27,11 @@ namespace webrtc {
class DecisionLogic : public NetEqController {
public:
DecisionLogic(NetEqController::Config config);
- DecisionLogic(NetEqController::Config config,
- std::unique_ptr<DelayManager> delay_manager,
- std::unique_ptr<BufferLevelFilter> buffer_level_filter);
+ DecisionLogic(
+ NetEqController::Config config,
+ std::unique_ptr<DelayManager> delay_manager,
+ std::unique_ptr<BufferLevelFilter> buffer_level_filter,
+ std::unique_ptr<PacketArrivalHistory> packet_arrival_history = nullptr);
~DecisionLogic() override;
@@ -154,16 +156,16 @@ class DecisionLogic : public NetEqController {
struct Config {
Config();
- bool enable_stable_delay_mode = false;
- bool combine_concealment_decision = false;
+ bool enable_stable_delay_mode = true;
+ bool combine_concealment_decision = true;
int deceleration_target_level_offset_ms = 85;
int packet_history_size_ms = 2000;
- absl::optional<int> cng_timeout_ms;
+ absl::optional<int> cng_timeout_ms = 1000;
};
Config config_;
std::unique_ptr<DelayManager> delay_manager_;
std::unique_ptr<BufferLevelFilter> buffer_level_filter_;
- PacketArrivalHistory packet_arrival_history_;
+ std::unique_ptr<PacketArrivalHistory> packet_arrival_history_;
const TickTimer* tick_timer_;
int sample_rate_khz_;
size_t output_size_samples_;
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
index 97e20dd883..9e9902af50 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
@@ -18,6 +18,7 @@
#include "modules/audio_coding/neteq/delay_manager.h"
#include "modules/audio_coding/neteq/mock/mock_buffer_level_filter.h"
#include "modules/audio_coding/neteq/mock/mock_delay_manager.h"
+#include "modules/audio_coding/neteq/mock/mock_packet_arrival_history.h"
#include "test/field_trial.h"
#include "test/gtest.h"
@@ -47,6 +48,7 @@ NetEqController::NetEqStatus CreateNetEqStatus(NetEq::Mode last_mode,
return status;
}
+using ::testing::_;
using ::testing::Return;
} // namespace
@@ -54,8 +56,6 @@ using ::testing::Return;
class DecisionLogicTest : public ::testing::Test {
protected:
DecisionLogicTest() {
- test::ScopedFieldTrials trials(
- "WebRTC-Audio-NetEqDecisionLogicConfig/cng_timeout_ms:1000/");
NetEqController::Config config;
config.tick_timer = &tick_timer_;
config.allow_time_stretching = true;
@@ -64,8 +64,11 @@ class DecisionLogicTest : public ::testing::Test {
mock_delay_manager_ = delay_manager.get();
auto buffer_level_filter = std::make_unique<MockBufferLevelFilter>();
mock_buffer_level_filter_ = buffer_level_filter.get();
+ auto packet_arrival_history = std::make_unique<MockPacketArrivalHistory>();
+ mock_packet_arrival_history_ = packet_arrival_history.get();
decision_logic_ = std::make_unique<DecisionLogic>(
- config, std::move(delay_manager), std::move(buffer_level_filter));
+ config, std::move(delay_manager), std::move(buffer_level_filter),
+ std::move(packet_arrival_history));
decision_logic_->SetSampleRate(kSampleRate, kOutputSizeSamples);
}
@@ -73,13 +76,16 @@ class DecisionLogicTest : public ::testing::Test {
std::unique_ptr<DecisionLogic> decision_logic_;
MockDelayManager* mock_delay_manager_;
MockBufferLevelFilter* mock_buffer_level_filter_;
+ MockPacketArrivalHistory* mock_packet_arrival_history_;
};
TEST_F(DecisionLogicTest, NormalOperation) {
EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
.WillRepeatedly(Return(100));
- EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level())
- .WillRepeatedly(Return(90 * kSamplesPerMs));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_, _))
+ .WillRepeatedly(Return(100));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs())
+ .WillRepeatedly(Return(0));
bool reset_decoder = false;
tick_timer_.Increment(kMinTimescaleInterval + 1);
@@ -92,8 +98,10 @@ TEST_F(DecisionLogicTest, NormalOperation) {
TEST_F(DecisionLogicTest, Accelerate) {
EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
.WillRepeatedly(Return(100));
- EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level())
- .WillRepeatedly(Return(110 * kSamplesPerMs));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_, _))
+ .WillRepeatedly(Return(150));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs())
+ .WillRepeatedly(Return(0));
bool reset_decoder = false;
tick_timer_.Increment(kMinTimescaleInterval + 1);
@@ -106,8 +114,10 @@ TEST_F(DecisionLogicTest, Accelerate) {
TEST_F(DecisionLogicTest, FastAccelerate) {
EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
.WillRepeatedly(Return(100));
- EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level())
- .WillRepeatedly(Return(400 * kSamplesPerMs));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_, _))
+ .WillRepeatedly(Return(500));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs())
+ .WillRepeatedly(Return(0));
bool reset_decoder = false;
tick_timer_.Increment(kMinTimescaleInterval + 1);
@@ -120,8 +130,10 @@ TEST_F(DecisionLogicTest, FastAccelerate) {
TEST_F(DecisionLogicTest, PreemptiveExpand) {
EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
.WillRepeatedly(Return(100));
- EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level())
- .WillRepeatedly(Return(50 * kSamplesPerMs));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_, _))
+ .WillRepeatedly(Return(50));
+ EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs())
+ .WillRepeatedly(Return(0));
bool reset_decoder = false;
tick_timer_.Increment(kMinTimescaleInterval + 1);
@@ -131,20 +143,6 @@ TEST_F(DecisionLogicTest, PreemptiveExpand) {
EXPECT_FALSE(reset_decoder);
}
-TEST_F(DecisionLogicTest, DecelerationTargetLevelOffset) {
- EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
- .WillRepeatedly(Return(500));
- EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level())
- .WillRepeatedly(Return(400 * kSamplesPerMs));
-
- bool reset_decoder = false;
- tick_timer_.Increment(kMinTimescaleInterval + 1);
- EXPECT_EQ(decision_logic_->GetDecision(
- CreateNetEqStatus(NetEq::Mode::kNormal, 400), &reset_decoder),
- NetEq::Operation::kPreemptiveExpand);
- EXPECT_FALSE(reset_decoder);
-}
-
TEST_F(DecisionLogicTest, PostponeDecodeAfterExpand) {
EXPECT_CALL(*mock_delay_manager_, TargetDelayMs())
.WillRepeatedly(Return(500));
@@ -170,7 +168,7 @@ TEST_F(DecisionLogicTest, TimeStrechComfortNoise) {
{
bool reset_decoder = false;
// Below target window.
- auto status = CreateNetEqStatus(NetEq::Mode::kCodecInternalCng, 400);
+ auto status = CreateNetEqStatus(NetEq::Mode::kCodecInternalCng, 200);
status.generated_noise_samples = 400 * kSamplesPerMs;
status.next_packet->timestamp =
status.target_timestamp + 400 * kSamplesPerMs;
@@ -189,18 +187,6 @@ TEST_F(DecisionLogicTest, TimeStrechComfortNoise) {
EXPECT_EQ(decision_logic_->GetDecision(status, &reset_decoder),
NetEq::Operation::kNormal);
EXPECT_FALSE(reset_decoder);
-
- // The buffer level filter should be adjusted with the number of samples
- // that was skipped.
- int timestamp_leap = status.next_packet->timestamp -
- status.target_timestamp -
- status.generated_noise_samples;
- EXPECT_CALL(*mock_buffer_level_filter_,
- Update(400 * kSamplesPerMs, timestamp_leap));
- EXPECT_EQ(decision_logic_->GetDecision(
- CreateNetEqStatus(NetEq::Mode::kNormal, 400), &reset_decoder),
- NetEq::Operation::kNormal);
- EXPECT_FALSE(reset_decoder);
}
}
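Note: with this change the DecisionLogic tests stub out PacketArrivalHistory instead of the buffer level filter, so the decision is driven by the measured packet delay relative to the delay manager's target. The expectations above, grouped by test name, reduce to the mapping sketched below; the exact thresholds live in decision_logic.cc and are not part of this hunk.

    // Target delay is 100 ms in every test above (illustrative summary only).
    //   GetDelayMs() == 100  ->  NetEq::Operation::kNormal
    //   GetDelayMs() == 150  ->  NetEq::Operation::kAccelerate
    //   GetDelayMs() == 500  ->  NetEq::Operation::kFastAccelerate
    //   GetDelayMs() ==  50  ->  NetEq::Operation::kPreemptiveExpand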
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h b/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h
new file mode 100644
index 0000000000..1b2080cd94
--- /dev/null
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_
+#define MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_
+
+#include "modules/audio_coding/neteq/packet_arrival_history.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockPacketArrivalHistory : public PacketArrivalHistory {
+ public:
+ MockPacketArrivalHistory() : PacketArrivalHistory(0) {}
+
+ MOCK_METHOD(int,
+ GetDelayMs,
+ (uint32_t rtp_timestamp, int64_t time_ms),
+ (const override));
+ MOCK_METHOD(int, GetMaxDelayMs, (), (const override));
+};
+
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_
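Note: a minimal usage sketch for this mock, mirroring the DecisionLogicTest fixture earlier in this patch. The variable names are illustrative, and the usual ::testing::_ / ::testing::Return aliases are assumed.

    auto history = std::make_unique<MockPacketArrivalHistory>();
    MockPacketArrivalHistory* history_ptr = history.get();
    // Stub out the delay estimates that the decision logic will query.
    EXPECT_CALL(*history_ptr, GetDelayMs(_, _)).WillRepeatedly(Return(100));
    EXPECT_CALL(*history_ptr, GetMaxDelayMs()).WillRepeatedly(Return(0));
    // `history` is then moved into DecisionLogic via its new four-argument
    // constructor, exactly as done in decision_logic_unittest.cc above.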
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 48357ea466..fa44f606fc 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -18,39 +18,15 @@ namespace webrtc {
class MockPacketBuffer : public PacketBuffer {
public:
- MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer)
- : PacketBuffer(max_number_of_packets, tick_timer) {}
+ MockPacketBuffer(size_t max_number_of_packets,
+ const TickTimer* tick_timer,
+ StatisticsCalculator* stats)
+ : PacketBuffer(max_number_of_packets, tick_timer, stats) {}
~MockPacketBuffer() override { Die(); }
MOCK_METHOD(void, Die, ());
- MOCK_METHOD(void, Flush, (StatisticsCalculator * stats), (override));
- MOCK_METHOD(void,
- PartialFlush,
- (int target_level_ms,
- size_t sample_rate,
- size_t last_decoded_length,
- StatisticsCalculator* stats),
- (override));
+ MOCK_METHOD(void, Flush, (), (override));
MOCK_METHOD(bool, Empty, (), (const, override));
- MOCK_METHOD(int,
- InsertPacket,
- (Packet && packet,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms,
- const DecoderDatabase& decoder_database),
- (override));
- MOCK_METHOD(int,
- InsertPacketList,
- (PacketList * packet_list,
- const DecoderDatabase& decoder_database,
- absl::optional<uint8_t>* current_rtp_payload_type,
- absl::optional<uint8_t>* current_cng_rtp_payload_type,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms),
- (override));
+ MOCK_METHOD(int, InsertPacket, (Packet && packet), (override));
MOCK_METHOD(int,
NextTimestamp,
(uint32_t * next_timestamp),
@@ -61,19 +37,14 @@ class MockPacketBuffer : public PacketBuffer {
(const, override));
MOCK_METHOD(const Packet*, PeekNextPacket, (), (const, override));
MOCK_METHOD(absl::optional<Packet>, GetNextPacket, (), (override));
- MOCK_METHOD(int,
- DiscardNextPacket,
- (StatisticsCalculator * stats),
- (override));
+ MOCK_METHOD(int, DiscardNextPacket, (), (override));
MOCK_METHOD(void,
DiscardOldPackets,
- (uint32_t timestamp_limit,
- uint32_t horizon_samples,
- StatisticsCalculator* stats),
+ (uint32_t timestamp_limit, uint32_t horizon_samples),
(override));
MOCK_METHOD(void,
DiscardAllOldPackets,
- (uint32_t timestamp_limit, StatisticsCalculator* stats),
+ (uint32_t timestamp_limit),
(override));
MOCK_METHOD(size_t, NumPacketsInBuffer, (), (const, override));
};
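Note: construction sketch for the slimmed-down mock; the StatisticsCalculator is now a constructor dependency rather than a per-call argument. Names are illustrative; the corresponding real usage appears in neteq_impl_unittest.cc further down.

    TickTimer tick_timer;
    StatisticsCalculator stats;
    MockPacketBuffer mock_buffer(/*max_number_of_packets=*/10, &tick_timer, &stats);
    // The simplified InsertPacket expectation no longer needs to delete a
    // packet list or set payload-type out-parameters.
    EXPECT_CALL(mock_buffer, InsertPacket(_)).WillOnce(Return(PacketBuffer::kOK));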
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.cc b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.cc
index 52e8cbad3a..e5c8bf6c08 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -70,6 +70,62 @@ std::unique_ptr<NetEqController> CreateNetEqController(
return controller_factory.CreateNetEqController(config);
}
+void SetAudioFrameActivityAndType(bool vad_enabled,
+ NetEqImpl::OutputType type,
+ AudioFrame::VADActivity last_vad_activity,
+ AudioFrame* audio_frame) {
+ switch (type) {
+ case NetEqImpl::OutputType::kNormalSpeech: {
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ audio_frame->vad_activity_ = AudioFrame::kVadActive;
+ break;
+ }
+ case NetEqImpl::OutputType::kVadPassive: {
+ // This should only be reached if the VAD is enabled.
+ RTC_DCHECK(vad_enabled);
+ audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ break;
+ }
+ case NetEqImpl::OutputType::kCNG: {
+ audio_frame->speech_type_ = AudioFrame::kCNG;
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ break;
+ }
+ case NetEqImpl::OutputType::kPLC: {
+ audio_frame->speech_type_ = AudioFrame::kPLC;
+ audio_frame->vad_activity_ = last_vad_activity;
+ break;
+ }
+ case NetEqImpl::OutputType::kPLCCNG: {
+ audio_frame->speech_type_ = AudioFrame::kPLCCNG;
+ audio_frame->vad_activity_ = AudioFrame::kVadPassive;
+ break;
+ }
+ case NetEqImpl::OutputType::kCodecPLC: {
+ audio_frame->speech_type_ = AudioFrame::kCodecPLC;
+ audio_frame->vad_activity_ = last_vad_activity;
+ break;
+ }
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+ if (!vad_enabled) {
+ // Always set kVadUnknown when receive VAD is inactive.
+ audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
+ }
+}
+
+// Returns true if both payload types are known to the decoder database, and
+// have the same sample rate.
+bool EqualSampleRates(uint8_t pt1,
+ uint8_t pt2,
+ const DecoderDatabase& decoder_database) {
+ auto* di1 = decoder_database.GetDecoderInfo(pt1);
+ auto* di2 = decoder_database.GetDecoderInfo(pt2);
+ return di1 && di2 && di1->SampleRateHz() == di2->SampleRateHz();
+}
+
} // namespace
NetEqImpl::Dependencies::Dependencies(
@@ -84,8 +140,9 @@ NetEqImpl::Dependencies::Dependencies(
new DecoderDatabase(decoder_factory, config.codec_pair_id)),
dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)),
dtmf_tone_generator(new DtmfToneGenerator),
- packet_buffer(
- new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())),
+ packet_buffer(new PacketBuffer(config.max_packets_in_buffer,
+ tick_timer.get(),
+ stats.get())),
neteq_controller(
CreateNetEqController(controller_factory,
config.min_delay_ms,
@@ -182,54 +239,6 @@ void NetEqImpl::InsertEmptyPacket(const RTPHeader& rtp_header) {
controller_->RegisterEmptyPacket();
}
-namespace {
-void SetAudioFrameActivityAndType(bool vad_enabled,
- NetEqImpl::OutputType type,
- AudioFrame::VADActivity last_vad_activity,
- AudioFrame* audio_frame) {
- switch (type) {
- case NetEqImpl::OutputType::kNormalSpeech: {
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- audio_frame->vad_activity_ = AudioFrame::kVadActive;
- break;
- }
- case NetEqImpl::OutputType::kVadPassive: {
- // This should only be reached if the VAD is enabled.
- RTC_DCHECK(vad_enabled);
- audio_frame->speech_type_ = AudioFrame::kNormalSpeech;
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- break;
- }
- case NetEqImpl::OutputType::kCNG: {
- audio_frame->speech_type_ = AudioFrame::kCNG;
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- break;
- }
- case NetEqImpl::OutputType::kPLC: {
- audio_frame->speech_type_ = AudioFrame::kPLC;
- audio_frame->vad_activity_ = last_vad_activity;
- break;
- }
- case NetEqImpl::OutputType::kPLCCNG: {
- audio_frame->speech_type_ = AudioFrame::kPLCCNG;
- audio_frame->vad_activity_ = AudioFrame::kVadPassive;
- break;
- }
- case NetEqImpl::OutputType::kCodecPLC: {
- audio_frame->speech_type_ = AudioFrame::kCodecPLC;
- audio_frame->vad_activity_ = last_vad_activity;
- break;
- }
- default:
- RTC_DCHECK_NOTREACHED();
- }
- if (!vad_enabled) {
- // Always set kVadUnknown when receive VAD is inactive.
- audio_frame->vad_activity_ = AudioFrame::kVadUnknown;
- }
-}
-} // namespace
-
int NetEqImpl::GetAudio(AudioFrame* audio_frame,
bool* muted,
int* current_sample_rate_hz,
@@ -265,7 +274,7 @@ void NetEqImpl::SetCodecs(const std::map<int, SdpAudioFormat>& codecs) {
const std::vector<int> changed_payload_types =
decoder_database_->SetCodecs(codecs);
for (const int pt : changed_payload_types) {
- packet_buffer_->DiscardPacketsWithPayloadType(pt, stats_.get());
+ packet_buffer_->DiscardPacketsWithPayloadType(pt);
}
}
@@ -283,8 +292,7 @@ int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) {
MutexLock lock(&mutex_);
int ret = decoder_database_->Remove(rtp_payload_type);
if (ret == DecoderDatabase::kOK || ret == DecoderDatabase::kDecoderNotFound) {
- packet_buffer_->DiscardPacketsWithPayloadType(rtp_payload_type,
- stats_.get());
+ packet_buffer_->DiscardPacketsWithPayloadType(rtp_payload_type);
return kOK;
}
return kFail;
@@ -441,7 +449,7 @@ absl::optional<NetEq::DecoderFormat> NetEqImpl::GetDecoderFormat(
void NetEqImpl::FlushBuffers() {
MutexLock lock(&mutex_);
RTC_LOG(LS_VERBOSE) << "FlushBuffers";
- packet_buffer_->Flush(stats_.get());
+ packet_buffer_->Flush();
RTC_DCHECK(sync_buffer_.get());
RTC_DCHECK(expand_.get());
sync_buffer_->Flush();
@@ -542,7 +550,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header,
// the packet has been successfully inserted into the packet buffer.
// Flush the packet buffer and DTMF buffer.
- packet_buffer_->Flush(stats_.get());
+ packet_buffer_->Flush();
dtmf_buffer_->Flush();
// Update audio buffer timestamp.
@@ -681,26 +689,25 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header,
number_of_primary_packets);
}
- // Insert packets in buffer.
- const int target_level_ms = controller_->TargetLevelMs();
- const int ret = packet_buffer_->InsertPacketList(
- &parsed_packet_list, *decoder_database_, &current_rtp_payload_type_,
- &current_cng_rtp_payload_type_, stats_.get(), decoder_frame_length_,
- last_output_sample_rate_hz_, target_level_ms);
bool buffer_flush_occured = false;
- if (ret == PacketBuffer::kFlushed) {
+ for (Packet& packet : parsed_packet_list) {
+ if (MaybeChangePayloadType(packet.payload_type)) {
+ packet_buffer_->Flush();
+ buffer_flush_occured = true;
+ }
+ int return_val = packet_buffer_->InsertPacket(std::move(packet));
+ if (return_val == PacketBuffer::kFlushed) {
+ buffer_flush_occured = true;
+ } else if (return_val != PacketBuffer::kOK) {
+ // An error occurred.
+ return kOtherError;
+ }
+ }
+
+ if (buffer_flush_occured) {
// Reset DSP timestamp etc. if packet buffer flushed.
new_codec_ = true;
update_sample_rate_and_channels = true;
- buffer_flush_occured = true;
- } else if (ret == PacketBuffer::kPartialFlush) {
- // Forward sync buffer timestamp
- timestamp_ = packet_buffer_->PeekNextPacket()->timestamp;
- sync_buffer_->IncreaseEndTimestamp(timestamp_ -
- sync_buffer_->end_timestamp());
- buffer_flush_occured = true;
- } else if (ret != PacketBuffer::kOK) {
- return kOtherError;
}
if (first_packet_) {
@@ -767,6 +774,31 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header,
return 0;
}
+bool NetEqImpl::MaybeChangePayloadType(uint8_t payload_type) {
+ bool changed = false;
+ if (decoder_database_->IsComfortNoise(payload_type)) {
+ if (current_cng_rtp_payload_type_ &&
+ *current_cng_rtp_payload_type_ != payload_type) {
+ // New CNG payload type implies new codec type.
+ current_rtp_payload_type_ = absl::nullopt;
+ changed = true;
+ }
+ current_cng_rtp_payload_type_ = payload_type;
+ } else if (!decoder_database_->IsDtmf(payload_type)) {
+ // This must be speech.
+ if ((current_rtp_payload_type_ &&
+ *current_rtp_payload_type_ != payload_type) ||
+ (current_cng_rtp_payload_type_ &&
+ !EqualSampleRates(payload_type, *current_cng_rtp_payload_type_,
+ *decoder_database_))) {
+ current_cng_rtp_payload_type_ = absl::nullopt;
+ changed = true;
+ }
+ current_rtp_payload_type_ = payload_type;
+ }
+ return changed;
+}
+
int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame,
bool* muted,
absl::optional<Operation> action_override) {
@@ -1037,8 +1069,7 @@ int NetEqImpl::GetDecision(Operation* operation,
uint32_t end_timestamp = sync_buffer_->end_timestamp();
if (!new_codec_) {
const uint32_t five_seconds_samples = 5 * fs_hz_;
- packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples,
- stats_.get());
+ packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples);
}
const Packet* packet = packet_buffer_->PeekNextPacket();
@@ -1058,14 +1089,12 @@ int NetEqImpl::GetDecision(Operation* operation,
(end_timestamp >= packet->timestamp ||
end_timestamp + generated_noise_samples > packet->timestamp)) {
// Don't use this packet, discard it.
- if (packet_buffer_->DiscardNextPacket(stats_.get()) !=
- PacketBuffer::kOK) {
+ if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
RTC_DCHECK_NOTREACHED(); // Must be ok by design.
}
// Check buffer again.
if (!new_codec_) {
- packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_,
- stats_.get());
+ packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_);
}
packet = packet_buffer_->PeekNextPacket();
}
@@ -2024,7 +2053,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
// we could end up in the situation where we never decode anything, since
// all incoming packets are considered too old but the buffer will also
// never be flooded and flushed.
- packet_buffer_->DiscardAllOldPackets(timestamp_, stats_.get());
+ packet_buffer_->DiscardAllOldPackets(timestamp_);
}
return rtc::dchecked_cast<int>(extracted_samples);
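Note: worked example of the new per-packet bookkeeping. The payload-type tracking that used to live in PacketBuffer::InsertPacketList now happens in NetEqImpl::MaybeChangePayloadType, and a detected change triggers a full buffer flush. The payload types below are the ones registered in the new unit tests further down; the comments are illustrative only.

    // pt 7 = CN/8000, pt 8 = L16/16000 (as in CngFirstThenSpeechWithNewSampleRate).
    // Insert CN (pt 7):   current_cng_rtp_payload_type_ = 7, no flush.
    // Insert L16 (pt 8):  speech whose sample rate differs from the stored CNG
    //                     payload type -> MaybeChangePayloadType() returns true,
    //                     the buffer is flushed and the discarded CN packet is
    //                     counted in the lifetime packets_discarded statistic.
    // Insert L16 (pt 8):  same speech payload type as before -> no flush.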
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.h b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.h
index f27738bcbf..f8f2b06410 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.h
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -27,6 +27,7 @@
#include "modules/audio_coding/neteq/audio_multi_vector.h"
#include "modules/audio_coding/neteq/expand_uma_logger.h"
#include "modules/audio_coding/neteq/packet.h"
+#include "modules/audio_coding/neteq/packet_buffer.h"
#include "modules/audio_coding/neteq/random_vector.h"
#include "modules/audio_coding/neteq/statistics_calculator.h"
#include "rtc_base/synchronization/mutex.h"
@@ -46,7 +47,6 @@ class Expand;
class Merge;
class NackTracker;
class Normal;
-class PacketBuffer;
class RedPayloadSplitter;
class PostDecodeVad;
class PreemptiveExpand;
@@ -215,6 +215,12 @@ class NetEqImpl : public webrtc::NetEq {
rtc::ArrayView<const uint8_t> payload)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+ // Returns true if the payload type changed (this should be followed by
+ // resetting various state). Returns false if the current payload type is
+ // unknown or equal to `payload_type`.
+ bool MaybeChangePayloadType(uint8_t payload_type)
+ RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
// Delivers 10 ms of audio data. The data is written to `audio_frame`.
// Returns 0 on success, otherwise an error code.
int GetAudioInternal(AudioFrame* audio_frame,
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
index e61cd52502..8309dafb58 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -108,8 +108,8 @@ class NetEqImplTest : public ::testing::Test {
dtmf_tone_generator_ = deps.dtmf_tone_generator.get();
if (use_mock_packet_buffer_) {
- std::unique_ptr<MockPacketBuffer> mock(
- new MockPacketBuffer(config_.max_packets_in_buffer, tick_timer_));
+ std::unique_ptr<MockPacketBuffer> mock(new MockPacketBuffer(
+ config_.max_packets_in_buffer, tick_timer_, deps.stats.get()));
mock_packet_buffer_ = mock.get();
deps.packet_buffer = std::move(mock);
}
@@ -120,7 +120,6 @@ class NetEqImplTest : public ::testing::Test {
mock_neteq_controller_ = mock.get();
deps.neteq_controller = std::move(mock);
} else {
- deps.stats = std::make_unique<StatisticsCalculator>();
NetEqController::Config controller_config;
controller_config.tick_timer = tick_timer_;
controller_config.base_min_delay_ms = config_.min_delay_ms;
@@ -329,15 +328,10 @@ TEST_F(NetEqImplTest, InsertPacket) {
// Expectations for packet buffer.
EXPECT_CALL(*mock_packet_buffer_, Empty())
.WillOnce(Return(false)); // Called once after first packet is inserted.
- EXPECT_CALL(*mock_packet_buffer_, Flush(_)).Times(1);
- EXPECT_CALL(*mock_packet_buffer_, InsertPacketList(_, _, _, _, _, _, _, _))
+ EXPECT_CALL(*mock_packet_buffer_, Flush()).Times(1);
+ EXPECT_CALL(*mock_packet_buffer_, InsertPacket(_))
.Times(2)
- .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType),
- WithArg<0>(Invoke(DeletePacketsAndReturnOk))));
- // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based
- // index) is a pointer, and the variable pointed to is set to kPayloadType.
- // Also invoke the function DeletePacketsAndReturnOk to properly delete all
- // packets in the list (to avoid memory leaks in the test).
+ .WillRepeatedly(Return(PacketBuffer::kOK));
EXPECT_CALL(*mock_packet_buffer_, PeekNextPacket())
.Times(1)
.WillOnce(Return(&fake_packet));
@@ -1246,12 +1240,15 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
EXPECT_EQ(kChannels, output.num_channels_);
EXPECT_THAT(output.packet_infos_, IsEmpty());
- // Second call to GetAudio will decode the packet that is ok. No errors are
- // expected.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
- EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
- EXPECT_EQ(kChannels, output.num_channels_);
- EXPECT_THAT(output.packet_infos_, SizeIs(1));
+ // Call GetAudio until the next packet is decoded.
+ int calls = 0;
+  const int kTimeout = 10;
+  while (output.packet_infos_.empty() && calls < kTimeout) {
+    EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+    EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
+    EXPECT_EQ(kChannels, output.num_channels_);
+    ++calls;
+  }
+ EXPECT_LT(calls, kTimeout);
// Die isn't called through NiceMock (since it's called by the
// MockAudioDecoder constructor), so it needs to be mocked explicitly.
@@ -1640,6 +1637,74 @@ TEST_F(NetEqImplTest, NoCrashWith1000Channels) {
}
}
+// The test first inserts a packet with narrow-band CNG, then a packet with
+// wide-band speech. The expected behavior is to detect a change in sample rate,
+// even though no speech packet has been inserted before, and flush out the CNG
+// packet.
+TEST_F(NetEqImplTest, CngFirstThenSpeechWithNewSampleRate) {
+ UseNoMocks();
+ CreateInstance();
+ constexpr int kCnPayloadType = 7;
+ neteq_->RegisterPayloadType(kCnPayloadType, SdpAudioFormat("cn", 8000, 1));
+ constexpr int kSpeechPayloadType = 8;
+ neteq_->RegisterPayloadType(kSpeechPayloadType,
+ SdpAudioFormat("l16", 16000, 1));
+
+ RTPHeader header;
+ header.payloadType = kCnPayloadType;
+ uint8_t payload[320] = {0};
+
+ EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK);
+ EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 0u);
+
+ header.payloadType = kSpeechPayloadType;
+ header.timestamp += 160;
+ EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK);
+ // CN packet should be discarded, since it does not match the
+ // new speech sample rate.
+ EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 1u);
+
+ // Next decoded packet should be speech.
+ AudioFrame audio_frame;
+ bool muted;
+ EXPECT_EQ(neteq_->GetAudio(&audio_frame, &muted), NetEq::kOK);
+ EXPECT_EQ(audio_frame.sample_rate_hz(), 16000);
+ EXPECT_EQ(audio_frame.speech_type_, AudioFrame::SpeechType::kNormalSpeech);
+}
+
+TEST_F(NetEqImplTest, InsertPacketChangePayloadType) {
+ UseNoMocks();
+ CreateInstance();
+ constexpr int kPcmuPayloadType = 7;
+ neteq_->RegisterPayloadType(kPcmuPayloadType,
+ SdpAudioFormat("pcmu", 8000, 1));
+ constexpr int kPcmaPayloadType = 8;
+ neteq_->RegisterPayloadType(kPcmaPayloadType,
+ SdpAudioFormat("pcma", 8000, 1));
+
+ RTPHeader header;
+ header.payloadType = kPcmuPayloadType;
+ header.timestamp = 1234;
+ uint8_t payload[160] = {0};
+
+ EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK);
+ EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 0u);
+
+ header.payloadType = kPcmaPayloadType;
+ header.timestamp += 80;
+ EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK);
+ // The previous packet should be discarded since the codec changed.
+ EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 1u);
+
+ // Next decoded packet should be speech.
+ AudioFrame audio_frame;
+ bool muted;
+ EXPECT_EQ(neteq_->GetAudio(&audio_frame, &muted), NetEq::kOK);
+ EXPECT_EQ(audio_frame.sample_rate_hz(), 8000);
+ EXPECT_EQ(audio_frame.speech_type_, AudioFrame::SpeechType::kNormalSpeech);
+ // TODO(jakobi): check active decoder.
+}
+
class Decoder120ms : public AudioDecoder {
public:
Decoder120ms(int sample_rate_hz, SpeechType speech_type)
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index a669ad727e..da516982c7 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -273,15 +273,16 @@ class NetEqNetworkStatsTest {
// Next we introduce packet losses.
SetPacketLossRate(0.1);
- expects.stats_ref.expand_rate = expects.stats_ref.speech_expand_rate = 898;
+ expects.expand_rate = expects.speech_expand_rate = kLargerThan;
RunTest(50, expects);
// Next we enable FEC.
decoder_->set_fec_enabled(true);
// If FEC fills in the lost packets, no packet loss will be counted.
+ expects.expand_rate = expects.speech_expand_rate = kEqual;
expects.stats_ref.expand_rate = expects.stats_ref.speech_expand_rate = 0;
- expects.stats_ref.secondary_decoded_rate = 2006;
- expects.stats_ref.secondary_discarded_rate = 14336;
+ expects.secondary_decoded_rate = kLargerThan;
+ expects.secondary_discarded_rate = kLargerThan;
RunTest(50, expects);
}
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_unittest.cc
index 77bd5b5035..aec7e580ec 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -31,7 +31,6 @@
#include "modules/include/module_common_types_public.h"
#include "modules/rtp_rtcp/include/rtcp_statistics.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/message_digest.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"
@@ -77,11 +76,11 @@ TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) {
webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp");
const std::string output_checksum =
- "fec6827bb9ee0b21770bbbb4a3a6f8823bf537dc|"
- "3610cc7be4b3407b9c273b1299ab7f8f47cca96b";
+ "2efdbea92c3fb2383c59f89d881efec9f94001d0|"
+ "a6831b946b59913852ae3e53f99fa8f209bb23cd";
const std::string network_stats_checksum =
- "3d043e47e5f4bb81d37e7bce8c44bf802965c853|"
+ "dfaf4399fd60293405290476ccf1c05c807c71a0|"
"076662525572dba753b11578330bd491923f7f5e";
DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum,
@@ -99,11 +98,11 @@ TEST_F(NetEqDecodingTest, MAYBE_TestOpusDtxBitExactness) {
webrtc::test::ResourcePath("audio_coding/neteq_opus_dtx", "rtp");
const std::string output_checksum =
- "b3c4899eab5378ef5e54f2302948872149f6ad5e|"
- "589e975ec31ea13f302457fea1425be9380ffb96";
+ "7eddce841cbfa500964c91cdae78b01b9f448948|"
+ "5d13affec87bf4cc8c7667f0cd0d25e1ad09c7c3";
const std::string network_stats_checksum =
- "dc8447b9fee1a21fd5d1f4045d62b982a3fb0215";
+ "92b0fdcbf8bb9354d40140b7312f2fb76a078555";
DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum,
absl::GetFlag(FLAGS_gen_ref));
@@ -165,7 +164,7 @@ TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) {
const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
const double kNetworkFreezeTimeMs = 0.0;
const bool kGetAudioDuringFreezeRecovery = false;
- const int kDelayToleranceMs = 20;
+ const int kDelayToleranceMs = 60;
const int kMaxTimeToSpeechMs = 100;
LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
@@ -495,7 +494,7 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) {
timestamp += kCngPeriodSamples;
uint32_t first_speech_timestamp = timestamp;
// Insert speech again.
- for (int i = 0; i < 3; ++i) {
+ for (int i = 0; i < 4; ++i) {
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
++seq_no;
@@ -700,8 +699,7 @@ TEST_F(NetEqDecodingTestWithMutedState, MutedStateOldPacket) {
for (int i = 0; i < 5; ++i) {
InsertPacket(kSamples * (i - 1000));
}
- EXPECT_FALSE(GetAudioReturnMuted());
- EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
+ GetAudioUntilNormal();
}
// Verifies that NetEq doesn't enter muted state when CNG mode is active and the
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/packet_arrival_history.h b/third_party/libwebrtc/modules/audio_coding/neteq/packet_arrival_history.h
index cad362b469..722caf5688 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/packet_arrival_history.h
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/packet_arrival_history.h
@@ -26,6 +26,7 @@ namespace webrtc {
class PacketArrivalHistory {
public:
explicit PacketArrivalHistory(int window_size_ms);
+ virtual ~PacketArrivalHistory() = default;
// Insert packet with `rtp_timestamp` and `arrival_time_ms` into the history.
void Insert(uint32_t rtp_timestamp, int64_t arrival_time_ms);
@@ -34,10 +35,10 @@ class PacketArrivalHistory {
// `(time_ms - p.arrival_time_ms) - (rtp_timestamp - p.rtp_timestamp)`
// where `p` is chosen as the packet arrival in the history that maximizes the
// delay.
- int GetDelayMs(uint32_t rtp_timestamp, int64_t time_ms) const;
+ virtual int GetDelayMs(uint32_t rtp_timestamp, int64_t time_ms) const;
// Get the maximum packet arrival delay observed in the history.
- int GetMaxDelayMs() const;
+ virtual int GetMaxDelayMs() const;
bool IsNewestRtpTimestamp(uint32_t rtp_timestamp) const;
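Note: making these two accessors virtual, together with the new virtual destructor, is what allows the gmock mock added earlier in this patch, or a hand-written fake like the hypothetical one below, to stand in for the real history in tests.

    class FakePacketArrivalHistory : public PacketArrivalHistory {
     public:
      FakePacketArrivalHistory() : PacketArrivalHistory(/*window_size_ms=*/0) {}
      // Pretend every packet is 100 ms late and no larger delay was observed.
      int GetDelayMs(uint32_t /*rtp_timestamp*/, int64_t /*time_ms*/) const override {
        return 100;
      }
      int GetMaxDelayMs() const override { return 100; }
    };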
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.cc b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.cc
index 9bfa908ab9..47c391a18f 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.cc
@@ -44,53 +44,14 @@ class NewTimestampIsLarger {
const Packet& new_packet_;
};
-// Returns true if both payload types are known to the decoder database, and
-// have the same sample rate.
-bool EqualSampleRates(uint8_t pt1,
- uint8_t pt2,
- const DecoderDatabase& decoder_database) {
- auto* di1 = decoder_database.GetDecoderInfo(pt1);
- auto* di2 = decoder_database.GetDecoderInfo(pt2);
- return di1 && di2 && di1->SampleRateHz() == di2->SampleRateHz();
-}
-
-void LogPacketDiscarded(int codec_level, StatisticsCalculator* stats) {
- RTC_CHECK(stats);
- if (codec_level > 0) {
- stats->SecondaryPacketsDiscarded(1);
- } else {
- stats->PacketsDiscarded(1);
- }
-}
-
-absl::optional<SmartFlushingConfig> GetSmartflushingConfig() {
- absl::optional<SmartFlushingConfig> result;
- std::string field_trial_string =
- field_trial::FindFullName("WebRTC-Audio-NetEqSmartFlushing");
- result = SmartFlushingConfig();
- bool enabled = false;
- auto parser = StructParametersParser::Create(
- "enabled", &enabled, "target_level_threshold_ms",
- &result->target_level_threshold_ms, "target_level_multiplier",
- &result->target_level_multiplier);
- parser->Parse(field_trial_string);
- if (!enabled) {
- return absl::nullopt;
- }
- RTC_LOG(LS_INFO) << "Using smart flushing, target_level_threshold_ms: "
- << result->target_level_threshold_ms
- << ", target_level_multiplier: "
- << result->target_level_multiplier;
- return result;
-}
-
} // namespace
PacketBuffer::PacketBuffer(size_t max_number_of_packets,
- const TickTimer* tick_timer)
- : smart_flushing_config_(GetSmartflushingConfig()),
- max_number_of_packets_(max_number_of_packets),
- tick_timer_(tick_timer) {}
+ const TickTimer* tick_timer,
+ StatisticsCalculator* stats)
+ : max_number_of_packets_(max_number_of_packets),
+ tick_timer_(tick_timer),
+ stats_(stats) {}
// Destructor. All packets in the buffer will be destroyed.
PacketBuffer::~PacketBuffer() {
@@ -98,45 +59,19 @@ PacketBuffer::~PacketBuffer() {
}
// Flush the buffer. All packets in the buffer will be destroyed.
-void PacketBuffer::Flush(StatisticsCalculator* stats) {
+void PacketBuffer::Flush() {
for (auto& p : buffer_) {
- LogPacketDiscarded(p.priority.codec_level, stats);
+ LogPacketDiscarded(p.priority.codec_level);
}
buffer_.clear();
- stats->FlushedPacketBuffer();
-}
-
-void PacketBuffer::PartialFlush(int target_level_ms,
- size_t sample_rate,
- size_t last_decoded_length,
- StatisticsCalculator* stats) {
- // Make sure that at least half the packet buffer capacity will be available
- // after the flush. This is done to avoid getting stuck if the target level is
- // very high.
- int target_level_samples =
- std::min(target_level_ms * sample_rate / 1000,
- max_number_of_packets_ * last_decoded_length / 2);
- // We should avoid flushing to very low levels.
- target_level_samples = std::max(
- target_level_samples, smart_flushing_config_->target_level_threshold_ms);
- while (GetSpanSamples(last_decoded_length, sample_rate, false) >
- static_cast<size_t>(target_level_samples) ||
- buffer_.size() > max_number_of_packets_ / 2) {
- LogPacketDiscarded(PeekNextPacket()->priority.codec_level, stats);
- buffer_.pop_front();
- }
+ stats_->FlushedPacketBuffer();
}
bool PacketBuffer::Empty() const {
return buffer_.empty();
}
-int PacketBuffer::InsertPacket(Packet&& packet,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms,
- const DecoderDatabase& decoder_database) {
+int PacketBuffer::InsertPacket(Packet&& packet) {
if (packet.empty()) {
RTC_LOG(LS_WARNING) << "InsertPacket invalid packet";
return kInvalidPacket;
@@ -149,32 +84,11 @@ int PacketBuffer::InsertPacket(Packet&& packet,
packet.waiting_time = tick_timer_->GetNewStopwatch();
- // Perform a smart flush if the buffer size exceeds a multiple of the target
- // level.
- const size_t span_threshold =
- smart_flushing_config_
- ? smart_flushing_config_->target_level_multiplier *
- std::max(smart_flushing_config_->target_level_threshold_ms,
- target_level_ms) *
- sample_rate / 1000
- : 0;
- const bool smart_flush =
- smart_flushing_config_.has_value() &&
- GetSpanSamples(last_decoded_length, sample_rate, false) >= span_threshold;
- if (buffer_.size() >= max_number_of_packets_ || smart_flush) {
- size_t buffer_size_before_flush = buffer_.size();
- if (smart_flushing_config_.has_value()) {
- // Flush down to the target level.
- PartialFlush(target_level_ms, sample_rate, last_decoded_length, stats);
- return_val = kPartialFlush;
- } else {
- // Buffer is full.
- Flush(stats);
- return_val = kFlushed;
- }
- RTC_LOG(LS_WARNING) << "Packet buffer flushed, "
- << (buffer_size_before_flush - buffer_.size())
- << " packets discarded.";
+ if (buffer_.size() >= max_number_of_packets_) {
+ // Buffer is full.
+ Flush();
+ return_val = kFlushed;
+ RTC_LOG(LS_WARNING) << "Packet buffer flushed.";
}
// Get an iterator pointing to the place in the buffer where the new packet
@@ -187,7 +101,7 @@ int PacketBuffer::InsertPacket(Packet&& packet,
// timestamp as `rit`, which has a higher priority, do not insert the new
  // packet into the list.
if (rit != buffer_.rend() && packet.timestamp == rit->timestamp) {
- LogPacketDiscarded(packet.priority.codec_level, stats);
+ LogPacketDiscarded(packet.priority.codec_level);
return return_val;
}
@@ -196,7 +110,7 @@ int PacketBuffer::InsertPacket(Packet&& packet,
// packet.
PacketList::iterator it = rit.base();
if (it != buffer_.end() && packet.timestamp == it->timestamp) {
- LogPacketDiscarded(it->priority.codec_level, stats);
+ LogPacketDiscarded(it->priority.codec_level);
it = buffer_.erase(it);
}
buffer_.insert(it, std::move(packet)); // Insert the packet at that position.
@@ -204,57 +118,6 @@ int PacketBuffer::InsertPacket(Packet&& packet,
return return_val;
}
-int PacketBuffer::InsertPacketList(
- PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- absl::optional<uint8_t>* current_rtp_payload_type,
- absl::optional<uint8_t>* current_cng_rtp_payload_type,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms) {
- RTC_DCHECK(stats);
- bool flushed = false;
- for (auto& packet : *packet_list) {
- if (decoder_database.IsComfortNoise(packet.payload_type)) {
- if (*current_cng_rtp_payload_type &&
- **current_cng_rtp_payload_type != packet.payload_type) {
- // New CNG payload type implies new codec type.
- *current_rtp_payload_type = absl::nullopt;
- Flush(stats);
- flushed = true;
- }
- *current_cng_rtp_payload_type = packet.payload_type;
- } else if (!decoder_database.IsDtmf(packet.payload_type)) {
- // This must be speech.
- if ((*current_rtp_payload_type &&
- **current_rtp_payload_type != packet.payload_type) ||
- (*current_cng_rtp_payload_type &&
- !EqualSampleRates(packet.payload_type,
- **current_cng_rtp_payload_type,
- decoder_database))) {
- *current_cng_rtp_payload_type = absl::nullopt;
- Flush(stats);
- flushed = true;
- }
- *current_rtp_payload_type = packet.payload_type;
- }
- int return_val =
- InsertPacket(std::move(packet), stats, last_decoded_length, sample_rate,
- target_level_ms, decoder_database);
- if (return_val == kFlushed) {
- // The buffer flushed, but this is not an error. We can still continue.
- flushed = true;
- } else if (return_val != kOK) {
- // An error occurred. Delete remaining packets in list and return.
- packet_list->clear();
- return return_val;
- }
- }
- packet_list->clear();
- return flushed ? kFlushed : kOK;
-}
-
int PacketBuffer::NextTimestamp(uint32_t* next_timestamp) const {
if (Empty()) {
return kBufferEmpty;
@@ -303,43 +166,40 @@ absl::optional<Packet> PacketBuffer::GetNextPacket() {
return packet;
}
-int PacketBuffer::DiscardNextPacket(StatisticsCalculator* stats) {
+int PacketBuffer::DiscardNextPacket() {
if (Empty()) {
return kBufferEmpty;
}
  // Assert that the packet sanity checks in the InsertPacket method work.
const Packet& packet = buffer_.front();
RTC_DCHECK(!packet.empty());
- LogPacketDiscarded(packet.priority.codec_level, stats);
+ LogPacketDiscarded(packet.priority.codec_level);
buffer_.pop_front();
return kOK;
}
void PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit,
- uint32_t horizon_samples,
- StatisticsCalculator* stats) {
- buffer_.remove_if([timestamp_limit, horizon_samples, stats](const Packet& p) {
+ uint32_t horizon_samples) {
+ buffer_.remove_if([this, timestamp_limit, horizon_samples](const Packet& p) {
if (timestamp_limit == p.timestamp ||
!IsObsoleteTimestamp(p.timestamp, timestamp_limit, horizon_samples)) {
return false;
}
- LogPacketDiscarded(p.priority.codec_level, stats);
+ LogPacketDiscarded(p.priority.codec_level);
return true;
});
}
-void PacketBuffer::DiscardAllOldPackets(uint32_t timestamp_limit,
- StatisticsCalculator* stats) {
- DiscardOldPackets(timestamp_limit, 0, stats);
+void PacketBuffer::DiscardAllOldPackets(uint32_t timestamp_limit) {
+ DiscardOldPackets(timestamp_limit, 0);
}
-void PacketBuffer::DiscardPacketsWithPayloadType(uint8_t payload_type,
- StatisticsCalculator* stats) {
- buffer_.remove_if([payload_type, stats](const Packet& p) {
+void PacketBuffer::DiscardPacketsWithPayloadType(uint8_t payload_type) {
+ buffer_.remove_if([this, payload_type](const Packet& p) {
if (p.payload_type != payload_type) {
return false;
}
- LogPacketDiscarded(p.priority.codec_level, stats);
+ LogPacketDiscarded(p.priority.codec_level);
return true;
});
}
@@ -404,4 +264,12 @@ bool PacketBuffer::ContainsDtxOrCngPacket(
return false;
}
+void PacketBuffer::LogPacketDiscarded(int codec_level) {
+ if (codec_level > 0) {
+ stats_->SecondaryPacketsDiscarded(1);
+ } else {
+ stats_->PacketsDiscarded(1);
+ }
+}
+
} // namespace webrtc
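Note: construction sketch for the updated PacketBuffer interface; names are illustrative. The StatisticsCalculator is now handed in once at construction, so every discard path reports through the stored stats_ pointer instead of taking a stats argument per call.

    TickTimer tick_timer;
    StatisticsCalculator stats;
    PacketBuffer buffer(/*max_number_of_packets=*/200, &tick_timer, &stats);
    // Discards are accounted via `stats` internally; no per-call argument.
    buffer.DiscardAllOldPackets(/*timestamp_limit=*/0);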
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.h b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.h
index 1eef64a02c..795dd4e812 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -21,14 +21,6 @@ namespace webrtc {
class DecoderDatabase;
class StatisticsCalculator;
class TickTimer;
-struct SmartFlushingConfig {
- // When calculating the flushing threshold, the maximum between the target
- // level and this value is used.
- int target_level_threshold_ms = 500;
- // A smart flush is triggered when the packet buffer contains a multiple of
- // the target level.
- int target_level_multiplier = 3;
-};
// This is the actual buffer holding the packets before decoding.
class PacketBuffer {
@@ -36,7 +28,6 @@ class PacketBuffer {
enum BufferReturnCodes {
kOK = 0,
kFlushed,
- kPartialFlush,
kNotFound,
kBufferEmpty,
kInvalidPacket,
@@ -45,7 +36,9 @@ class PacketBuffer {
// Constructor creates a buffer which can hold a maximum of
// `max_number_of_packets` packets.
- PacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer);
+ PacketBuffer(size_t max_number_of_packets,
+ const TickTimer* tick_timer,
+ StatisticsCalculator* stats);
// Deletes all packets in the buffer before destroying the buffer.
virtual ~PacketBuffer();
@@ -54,13 +47,7 @@ class PacketBuffer {
PacketBuffer& operator=(const PacketBuffer&) = delete;
// Flushes the buffer and deletes all packets in it.
- virtual void Flush(StatisticsCalculator* stats);
-
- // Partial flush. Flush packets but leave some packets behind.
- virtual void PartialFlush(int target_level_ms,
- size_t sample_rate,
- size_t last_decoded_length,
- StatisticsCalculator* stats);
+ virtual void Flush();
// Returns true for an empty buffer.
virtual bool Empty() const;
@@ -69,30 +56,7 @@ class PacketBuffer {
// the packet object.
// Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer
// was flushed due to overfilling.
- virtual int InsertPacket(Packet&& packet,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms,
- const DecoderDatabase& decoder_database);
-
- // Inserts a list of packets into the buffer. The buffer will take over
- // ownership of the packet objects.
- // Returns PacketBuffer::kOK if all packets were inserted successfully.
- // If the buffer was flushed due to overfilling, only a subset of the list is
- // inserted, and PacketBuffer::kFlushed is returned.
- // The last three parameters are included for legacy compatibility.
- // TODO(hlundin): Redesign to not use current_*_payload_type and
- // decoder_database.
- virtual int InsertPacketList(
- PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- absl::optional<uint8_t>* current_rtp_payload_type,
- absl::optional<uint8_t>* current_cng_rtp_payload_type,
- StatisticsCalculator* stats,
- size_t last_decoded_length,
- size_t sample_rate,
- int target_level_ms);
+ virtual int InsertPacket(Packet&& packet);
// Gets the timestamp for the first packet in the buffer and writes it to the
// output variable `next_timestamp`.
@@ -119,7 +83,7 @@ class PacketBuffer {
// Discards the first packet in the buffer. The packet is deleted.
// Returns PacketBuffer::kBufferEmpty if the buffer is empty,
// PacketBuffer::kOK otherwise.
- virtual int DiscardNextPacket(StatisticsCalculator* stats);
+ virtual int DiscardNextPacket();
// Discards all packets that are (strictly) older than timestamp_limit,
// but newer than timestamp_limit - horizon_samples. Setting horizon_samples
@@ -127,16 +91,13 @@ class PacketBuffer {
// is, if a packet is more than 2^31 timestamps into the future compared with
// timestamp_limit (including wrap-around), it is considered old.
virtual void DiscardOldPackets(uint32_t timestamp_limit,
- uint32_t horizon_samples,
- StatisticsCalculator* stats);
+ uint32_t horizon_samples);
// Discards all packets that are (strictly) older than timestamp_limit.
- virtual void DiscardAllOldPackets(uint32_t timestamp_limit,
- StatisticsCalculator* stats);
+ virtual void DiscardAllOldPackets(uint32_t timestamp_limit);
// Removes all packets with a specific payload type from the buffer.
- virtual void DiscardPacketsWithPayloadType(uint8_t payload_type,
- StatisticsCalculator* stats);
+ virtual void DiscardPacketsWithPayloadType(uint8_t payload_type);
// Returns the number of packets in the buffer, including duplicates and
// redundant packets.
@@ -171,10 +132,12 @@ class PacketBuffer {
}
private:
- absl::optional<SmartFlushingConfig> smart_flushing_config_;
+ void LogPacketDiscarded(int codec_level);
+
size_t max_number_of_packets_;
PacketList buffer_;
const TickTimer* tick_timer_;
+ StatisticsCalculator* stats_;
};
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
index b0079645ff..8f307a9eaf 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -108,26 +108,23 @@ namespace webrtc {
TEST(PacketBuffer, CreateAndDestroy) {
TickTimer tick_timer;
- PacketBuffer* buffer = new PacketBuffer(10, &tick_timer); // 10 packets.
+ StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer* buffer =
+ new PacketBuffer(10, &tick_timer, &mock_stats); // 10 packets.
EXPECT_TRUE(buffer->Empty());
delete buffer;
}
TEST(PacketBuffer, InsertPacket) {
TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(17u, 4711u, 0, 10);
StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets.
+ PacketGenerator gen(17u, 4711u, 0, 10);
MockDecoderDatabase decoder_database;
const int payload_len = 100;
const Packet packet = gen.NextPacket(payload_len, nullptr);
- EXPECT_EQ(0, buffer.InsertPacket(/*packet=*/packet.Clone(),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/10000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ EXPECT_EQ(0, buffer.InsertPacket(/*packet=*/packet.Clone()));
uint32_t next_ts;
EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
EXPECT_EQ(4711u, next_ts);
@@ -144,28 +141,22 @@ TEST(PacketBuffer, InsertPacket) {
// Test to flush buffer.
TEST(PacketBuffer, FlushBuffer) {
TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
+ StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
const int payload_len = 10;
- StrictMock<MockStatisticsCalculator> mock_stats;
MockDecoderDatabase decoder_database;
// Insert 10 small packets; should be ok.
for (int i = 0; i < 10; ++i) {
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(/*packet=*/gen.NextPacket(
+ payload_len, nullptr)));
}
EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
EXPECT_FALSE(buffer.Empty());
EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(10);
- buffer.Flush(&mock_stats);
+ buffer.Flush();
// Buffer should delete the payloads itself.
EXPECT_EQ(0u, buffer.NumPacketsInBuffer());
EXPECT_TRUE(buffer.Empty());
@@ -175,23 +166,17 @@ TEST(PacketBuffer, FlushBuffer) {
// Test to fill the buffer over the limits, and verify that it flushes.
TEST(PacketBuffer, OverfillBuffer) {
TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(0, 0, 0, 10);
StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets.
+ PacketGenerator gen(0, 0, 0, 10);
MockDecoderDatabase decoder_database;
// Insert 10 small packets; should be ok.
const int payload_len = 10;
int i;
for (i = 0; i < 10; ++i) {
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(/*packet=*/gen.NextPacket(
+ payload_len, nullptr)));
}
EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
uint32_t next_ts;
@@ -202,12 +187,7 @@ TEST(PacketBuffer, OverfillBuffer) {
const Packet packet = gen.NextPacket(payload_len, nullptr);
// Insert 11th packet; should flush the buffer and insert it after flushing.
EXPECT_EQ(PacketBuffer::kFlushed,
- buffer.InsertPacket(/*packet=*/packet.Clone(),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/packet.Clone()));
EXPECT_EQ(1u, buffer.NumPacketsInBuffer());
EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
// Expect last inserted packet to be first in line.
@@ -216,190 +196,11 @@ TEST(PacketBuffer, OverfillBuffer) {
EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
}
-// Test a partial buffer flush.
-TEST(PacketBuffer, PartialFlush) {
- // Use a field trial to configure smart flushing.
- test::ScopedFieldTrials field_trials(
- "WebRTC-Audio-NetEqSmartFlushing/enabled:true,"
- "target_level_threshold_ms:0,target_level_multiplier:2/");
- TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(0, 0, 0, 10);
- const int payload_len = 10;
- StrictMock<MockStatisticsCalculator> mock_stats;
- MockDecoderDatabase decoder_database;
-
- // Insert 10 small packets; should be ok.
- for (int i = 0; i < 10; ++i) {
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/100,
- /*decoder_database=*/decoder_database));
- }
- EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
- EXPECT_FALSE(buffer.Empty());
-
- EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(7);
- buffer.PartialFlush(/*target_level_ms=*/30,
- /*sample_rate=*/1000,
- /*last_decoded_length=*/payload_len,
- /*stats=*/&mock_stats);
- // There should still be some packets left in the buffer.
- EXPECT_EQ(3u, buffer.NumPacketsInBuffer());
- EXPECT_FALSE(buffer.Empty());
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test to fill the buffer over the limits, and verify that the smart flush
-// functionality works as expected.
-TEST(PacketBuffer, SmartFlushOverfillBuffer) {
- // Use a field trial to configure smart flushing.
- test::ScopedFieldTrials field_trials(
- "WebRTC-Audio-NetEqSmartFlushing/enabled:true,"
- "target_level_threshold_ms:0,target_level_multiplier:2/");
- TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(0, 0, 0, 10);
- StrictMock<MockStatisticsCalculator> mock_stats;
- MockDecoderDatabase decoder_database;
-
- // Insert 10 small packets; should be ok.
- const int payload_len = 10;
- int i;
- for (i = 0; i < 10; ++i) {
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/100,
- /*decoder_database=*/decoder_database));
- }
- EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
- uint32_t next_ts;
- EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
- EXPECT_EQ(0u, next_ts); // Expect first inserted packet to be first in line.
-
- const Packet packet = gen.NextPacket(payload_len, nullptr);
- EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(6);
- // Insert 11th packet; should cause a partial flush and insert the packet
- // after flushing.
- EXPECT_EQ(PacketBuffer::kPartialFlush,
- buffer.InsertPacket(/*packet=*/packet.Clone(),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/40,
- /*decoder_database=*/decoder_database));
- EXPECT_EQ(5u, buffer.NumPacketsInBuffer());
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test inserting a list of packets.
-TEST(PacketBuffer, InsertPacketList) {
- TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(0, 0, 0, 10);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- list.push_back(gen.NextPacket(payload_len, nullptr));
- }
-
- MockDecoderDatabase decoder_database;
- auto factory = CreateBuiltinAudioDecoderFactory();
- const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(0))
- .WillRepeatedly(Return(&info));
-
- StrictMock<MockStatisticsCalculator> mock_stats;
-
- absl::optional<uint8_t> current_pt;
- absl::optional<uint8_t> current_cng_pt;
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
- EXPECT_EQ(0, current_pt); // Current payload type changed to 0.
- EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type not changed.
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// Test inserting a list of packets. Last packet is of a different payload type.
-// Expecting the buffer to flush.
-// TODO(hlundin): Remove this test when legacy operation is no longer needed.
-TEST(PacketBuffer, InsertPacketListChangePayloadType) {
- TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- PacketGenerator gen(0, 0, 0, 10);
- PacketList list;
- const int payload_len = 10;
-
- // Insert 10 small packets.
- for (int i = 0; i < 10; ++i) {
- list.push_back(gen.NextPacket(payload_len, nullptr));
- }
- // Insert 11th packet of another payload type (not CNG).
- {
- Packet packet = gen.NextPacket(payload_len, nullptr);
- packet.payload_type = 1;
- list.push_back(std::move(packet));
- }
-
- MockDecoderDatabase decoder_database;
- auto factory = CreateBuiltinAudioDecoderFactory();
- const DecoderDatabase::DecoderInfo info0(SdpAudioFormat("pcmu", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(0))
- .WillRepeatedly(Return(&info0));
- const DecoderDatabase::DecoderInfo info1(SdpAudioFormat("pcma", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(1))
- .WillRepeatedly(Return(&info1));
-
- StrictMock<MockStatisticsCalculator> mock_stats;
-
- absl::optional<uint8_t> current_pt;
- absl::optional<uint8_t> current_cng_pt;
- EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(10);
- EXPECT_EQ(
- PacketBuffer::kFlushed,
- buffer.InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(1u, buffer.NumPacketsInBuffer()); // Only the last packet.
- EXPECT_EQ(1, current_pt); // Current payload type changed to 1.
- EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type not changed.
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
TEST(PacketBuffer, ExtractOrderRedundancy) {
TickTimer tick_timer;
- PacketBuffer buffer(100, &tick_timer); // 100 packets.
+ StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets.
const int kPackets = 18;
const int kFrameSize = 10;
const int kPayloadLength = 10;
@@ -423,8 +224,6 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
PacketGenerator gen(0, 0, 0, kFrameSize);
- StrictMock<MockStatisticsCalculator> mock_stats;
-
// Interleaving the EXPECT_CALL sequence with expectations on the MockFunction
// check ensures that exactly one call to PacketsDiscarded happens in each
// DiscardNextPacket call.
@@ -444,12 +243,7 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
}
EXPECT_CALL(check, Call(i));
EXPECT_EQ(PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/packet.Clone(),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kPayloadLength,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/packet.Clone()));
if (packet_facts[i].extract_order >= 0) {
expect_order[packet_facts[i].extract_order] = std::move(packet);
}
@@ -468,25 +262,20 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
TEST(PacketBuffer, DiscardPackets) {
TickTimer tick_timer;
- PacketBuffer buffer(100, &tick_timer); // 100 packets.
+ StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
PacketList list;
const int payload_len = 10;
- StrictMock<MockStatisticsCalculator> mock_stats;
MockDecoderDatabase decoder_database;
constexpr int kTotalPackets = 10;
// Insert 10 small packets.
for (int i = 0; i < kTotalPackets; ++i) {
- buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database);
+ buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr));
}
EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
@@ -507,7 +296,7 @@ TEST(PacketBuffer, DiscardPackets) {
EXPECT_EQ(current_ts, ts);
EXPECT_CALL(mock_stats, PacketsDiscarded(1));
EXPECT_CALL(check, Call(i));
- EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket(&mock_stats));
+ EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket());
current_ts += ts_increment;
check.Call(i);
}
@@ -520,7 +309,7 @@ TEST(PacketBuffer, DiscardPackets) {
.Times(kRemainingPackets - kSkipPackets);
EXPECT_CALL(check, Call(17)); // Arbitrary id number.
buffer.DiscardOldPackets(start_ts + kTotalPackets * ts_increment,
- kRemainingPackets * ts_increment, &mock_stats);
+ kRemainingPackets * ts_increment);
check.Call(17); // Same arbitrary id number.
EXPECT_EQ(kSkipPackets, buffer.NumPacketsInBuffer());
@@ -530,8 +319,7 @@ TEST(PacketBuffer, DiscardPackets) {
// Discard all remaining packets.
EXPECT_CALL(mock_stats, PacketsDiscarded(kSkipPackets));
- buffer.DiscardAllOldPackets(start_ts + kTotalPackets * ts_increment,
- &mock_stats);
+ buffer.DiscardAllOldPackets(start_ts + kTotalPackets * ts_increment);
EXPECT_TRUE(buffer.Empty());
EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
@@ -539,7 +327,8 @@ TEST(PacketBuffer, DiscardPackets) {
TEST(PacketBuffer, Reordering) {
TickTimer tick_timer;
- PacketBuffer buffer(100, &tick_timer); // 100 packets.
+ StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -559,27 +348,9 @@ TEST(PacketBuffer, Reordering) {
}
}
- MockDecoderDatabase decoder_database;
- auto factory = CreateBuiltinAudioDecoderFactory();
- const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(0))
- .WillRepeatedly(Return(&info));
- absl::optional<uint8_t> current_pt;
- absl::optional<uint8_t> current_cng_pt;
-
- StrictMock<MockStatisticsCalculator> mock_stats;
-
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
+ for (Packet& packet : list) {
+ EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(std::move(packet)));
+ }
EXPECT_EQ(10u, buffer.NumPacketsInBuffer());
  // Extract them and make sure that they come out in the right order.
@@ -591,86 +362,6 @@ TEST(PacketBuffer, Reordering) {
current_ts += ts_increment;
}
EXPECT_TRUE(buffer.Empty());
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
-}
-
-// The test first inserts a packet with narrow-band CNG, then a packet with
-// wide-band speech. The expected behavior of the packet buffer is to detect a
-// change in sample rate, even though no speech packet has been inserted before,
-// and flush out the CNG packet.
-TEST(PacketBuffer, CngFirstThenSpeechWithNewSampleRate) {
- TickTimer tick_timer;
- PacketBuffer buffer(10, &tick_timer); // 10 packets.
- const uint8_t kCngPt = 13;
- const int kPayloadLen = 10;
- const uint8_t kSpeechPt = 100;
-
- MockDecoderDatabase decoder_database;
- auto factory = CreateBuiltinAudioDecoderFactory();
- const DecoderDatabase::DecoderInfo info_cng(SdpAudioFormat("cn", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(kCngPt))
- .WillRepeatedly(Return(&info_cng));
- const DecoderDatabase::DecoderInfo info_speech(
- SdpAudioFormat("l16", 16000, 1), absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(kSpeechPt))
- .WillRepeatedly(Return(&info_speech));
-
- // Insert first packet, which is narrow-band CNG.
- PacketGenerator gen(0, 0, kCngPt, 10);
- PacketList list;
- list.push_back(gen.NextPacket(kPayloadLen, nullptr));
- absl::optional<uint8_t> current_pt;
- absl::optional<uint8_t> current_cng_pt;
-
- StrictMock<MockStatisticsCalculator> mock_stats;
-
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer.InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kPayloadLen,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
- EXPECT_TRUE(list.empty());
- EXPECT_EQ(1u, buffer.NumPacketsInBuffer());
- ASSERT_TRUE(buffer.PeekNextPacket());
- EXPECT_EQ(kCngPt, buffer.PeekNextPacket()->payload_type);
- EXPECT_EQ(current_pt, absl::nullopt); // Current payload type not set.
- EXPECT_EQ(kCngPt, current_cng_pt); // CNG payload type set.
-
- // Insert second packet, which is wide-band speech.
- {
- Packet packet = gen.NextPacket(kPayloadLen, nullptr);
- packet.payload_type = kSpeechPt;
- list.push_back(std::move(packet));
- }
- // Expect the buffer to flush out the CNG packet, since it does not match the
- // new speech sample rate.
- EXPECT_CALL(mock_stats, PacketsDiscarded(1));
- EXPECT_EQ(
- PacketBuffer::kFlushed,
- buffer.InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kPayloadLen,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
- EXPECT_TRUE(list.empty());
- EXPECT_EQ(1u, buffer.NumPacketsInBuffer());
- ASSERT_TRUE(buffer.PeekNextPacket());
- EXPECT_EQ(kSpeechPt, buffer.PeekNextPacket()->payload_type);
-
- EXPECT_EQ(kSpeechPt, current_pt); // Current payload type set.
- EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type reset.
-
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
}
TEST(PacketBuffer, Failures) {
@@ -681,80 +372,26 @@ TEST(PacketBuffer, Failures) {
PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
TickTimer tick_timer;
StrictMock<MockStatisticsCalculator> mock_stats;
- MockDecoderDatabase decoder_database;
- PacketBuffer* buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
+ PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets.
{
Packet packet = gen.NextPacket(payload_len, nullptr);
packet.payload.Clear();
EXPECT_EQ(PacketBuffer::kInvalidPacket,
- buffer->InsertPacket(/*packet=*/std::move(packet),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/std::move(packet)));
}
// Buffer should still be empty. Test all empty-checks.
uint32_t temp_ts;
- EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->NextTimestamp(&temp_ts));
+ EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer.NextTimestamp(&temp_ts));
EXPECT_EQ(PacketBuffer::kBufferEmpty,
- buffer->NextHigherTimestamp(0, &temp_ts));
- EXPECT_EQ(NULL, buffer->PeekNextPacket());
- EXPECT_FALSE(buffer->GetNextPacket());
+ buffer.NextHigherTimestamp(0, &temp_ts));
+ EXPECT_EQ(NULL, buffer.PeekNextPacket());
+ EXPECT_FALSE(buffer.GetNextPacket());
  // Discarding packets will not invoke mock_stats.PacketsDiscarded() because the
// packet buffer is empty.
- EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->DiscardNextPacket(&mock_stats));
- buffer->DiscardAllOldPackets(0, &mock_stats);
-
- // Insert one packet to make the buffer non-empty.
- EXPECT_EQ(
- PacketBuffer::kOK,
- buffer->InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
- EXPECT_EQ(PacketBuffer::kInvalidPointer, buffer->NextTimestamp(NULL));
- EXPECT_EQ(PacketBuffer::kInvalidPointer,
- buffer->NextHigherTimestamp(0, NULL));
- delete buffer;
-
- // Insert packet list of three packets, where the second packet has an invalid
- // payload. Expect first packet to be inserted, and the remaining two to be
- // discarded.
- buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
- PacketList list;
- list.push_back(gen.NextPacket(payload_len, nullptr)); // Valid packet.
- {
- Packet packet = gen.NextPacket(payload_len, nullptr);
- packet.payload.Clear(); // Invalid.
- list.push_back(std::move(packet));
- }
- list.push_back(gen.NextPacket(payload_len, nullptr)); // Valid packet.
- auto factory = CreateBuiltinAudioDecoderFactory();
- const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1),
- absl::nullopt, factory.get());
- EXPECT_CALL(decoder_database, GetDecoderInfo(0))
- .WillRepeatedly(Return(&info));
- absl::optional<uint8_t> current_pt;
- absl::optional<uint8_t> current_cng_pt;
- EXPECT_EQ(
- PacketBuffer::kInvalidPacket,
- buffer->InsertPacketList(/*packet_list=*/&list,
- /*decoder_database=*/decoder_database,
- /*current_rtp_payload_type=*/&current_pt,
- /*current_cng_rtp_payload_type=*/&current_cng_pt,
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/payload_len,
- /*sample_rate=*/1000,
- /*target_level_ms=*/30));
- EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
- EXPECT_EQ(1u, buffer->NumPacketsInBuffer());
- delete buffer;
- EXPECT_CALL(decoder_database, Die()); // Called when object is deleted.
+ EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer.DiscardNextPacket());
+ buffer.DiscardAllOldPackets(0);
}
// Test packet comparison function.
@@ -873,9 +510,9 @@ TEST(PacketBuffer, GetSpanSamples) {
constexpr int kSampleRateHz = 48000;
constexpr bool kCountWaitingTime = false;
TickTimer tick_timer;
- PacketBuffer buffer(3, &tick_timer);
- PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples);
StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(3, &tick_timer, &mock_stats);
+ PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples);
MockDecoderDatabase decoder_database;
Packet packet_1 = gen.NextPacket(kPayloadSizeBytes, nullptr);
@@ -891,12 +528,7 @@ TEST(PacketBuffer, GetSpanSamples) {
            packet_2.timestamp);  // Timestamp wrapped around.
EXPECT_EQ(PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/std::move(packet_1),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kFrameSizeSamples,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/std::move(packet_1)));
constexpr size_t kLastDecodedSizeSamples = 2;
  // packet_1 has no access to duration, and relies on the last decoded duration as
@@ -906,12 +538,7 @@ TEST(PacketBuffer, GetSpanSamples) {
kCountWaitingTime));
EXPECT_EQ(PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/std::move(packet_2),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kFrameSizeSamples,
- /*sample_rate=*/1000,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/std::move(packet_2)));
EXPECT_EQ(kFrameSizeSamples * 2,
buffer.GetSpanSamples(0, kSampleRateHz, kCountWaitingTime));
@@ -931,20 +558,15 @@ TEST(PacketBuffer, GetSpanSamplesCountWaitingTime) {
constexpr bool kCountWaitingTime = true;
constexpr size_t kLastDecodedSizeSamples = 0;
TickTimer tick_timer;
- PacketBuffer buffer(3, &tick_timer);
- PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples);
StrictMock<MockStatisticsCalculator> mock_stats;
+ PacketBuffer buffer(3, &tick_timer, &mock_stats);
+ PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples);
MockDecoderDatabase decoder_database;
Packet packet = gen.NextPacket(kPayloadSizeBytes, nullptr);
EXPECT_EQ(PacketBuffer::kOK,
- buffer.InsertPacket(/*packet=*/std::move(packet),
- /*stats=*/&mock_stats,
- /*last_decoded_length=*/kFrameSizeSamples,
- /*sample_rate=*/kSampleRateHz,
- /*target_level_ms=*/60,
- /*decoder_database=*/decoder_database));
+ buffer.InsertPacket(/*packet=*/std::move(packet)));
EXPECT_EQ(0u, buffer.GetSpanSamples(kLastDecodedSizeSamples, kSampleRateHz,
kCountWaitingTime));
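Taken together, the packet_buffer_unittest.cc changes above reflect a simplified PacketBuffer API: the StatisticsCalculator is now handed to the constructor, and InsertPacket / DiscardNextPacket / DiscardOldPackets / DiscardAllOldPackets no longer take stats, decoder-database, sample-rate, or target-level arguments. A rough usage sketch follows; the header paths, parameter names, Packet field setup, and the assumption that the concrete StatisticsCalculator is default-constructible are inferred, not taken from this diff.

  #include <utility>

  #include "api/neteq/tick_timer.h"                              // Assumed path.
  #include "modules/audio_coding/neteq/packet_buffer.h"
  #include "modules/audio_coding/neteq/statistics_calculator.h"  // Assumed path.

  void SketchNewPacketBufferUsage() {
    webrtc::TickTimer tick_timer;
    webrtc::StatisticsCalculator stats;
    // Stats are bound once at construction instead of being passed per call.
    webrtc::PacketBuffer buffer(/*max_number_of_packets=*/100, &tick_timer,
                                &stats);

    webrtc::Packet packet;
    packet.sequence_number = 17;
    packet.timestamp = 4711;
    packet.payload_type = 0;
    packet.payload.SetSize(10);  // Dummy 10-byte payload.

    // Insertion and discarding no longer need stats or decoder-database args.
    buffer.InsertPacket(std::move(packet));
    buffer.DiscardAllOldPackets(/*timestamp_limit=*/4711 + 100);
  }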
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_decoding_test.cc b/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_decoding_test.cc
index e6c1809fb6..e626d09c99 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_decoding_test.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/test/neteq_decoding_test.cc
@@ -19,13 +19,13 @@
#include "test/testsupport/file_utils.h"
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
-RTC_PUSH_IGNORING_WUNDEF()
+
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/neteq/neteq_unittest.pb.h"
#else
#include "modules/audio_coding/neteq/neteq_unittest.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
+
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq/test/result_sink.cc b/third_party/libwebrtc/modules/audio_coding/neteq/test/result_sink.cc
index f5d50dc859..fee7b49eb3 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq/test/result_sink.cc
+++ b/third_party/libwebrtc/modules/audio_coding/neteq/test/result_sink.cc
@@ -13,19 +13,18 @@
#include <string>
#include "absl/strings/string_view.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/message_digest.h"
#include "rtc_base/string_encode.h"
#include "test/gtest.h"
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
-RTC_PUSH_IGNORING_WUNDEF()
+
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_coding/neteq/neteq_unittest.pb.h"
#else
#include "modules/audio_coding/neteq/neteq_unittest.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
+
#endif
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_coding/neteq_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/neteq_gn/moz.build
index 04dbb03279..834a8d1265 100644
--- a/third_party/libwebrtc/modules/audio_coding/neteq_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/neteq_gn/moz.build
@@ -234,7 +234,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -244,10 +243,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/pcm16b_c_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/pcm16b_c_gn/moz.build
index 41f722069c..ef0c150cb8 100644
--- a/third_party/libwebrtc/modules/audio_coding/pcm16b_c_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/pcm16b_c_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build
index ed96e7c0f8..a1d9c8009d 100644
--- a/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build
@@ -197,7 +197,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -207,10 +206,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/red_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/red_gn/moz.build
index 479cf67a2a..ab0d8129bb 100644
--- a/third_party/libwebrtc/modules/audio_coding/red_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/red_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_cng_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_cng_gn/moz.build
index a8a6c576e2..d077aaa930 100644
--- a/third_party/libwebrtc/modules/audio_coding/webrtc_cng_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/webrtc_cng_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build
index 491f0cc543..d48fd68174 100644
--- a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build
index e2c57b99af..02986beaa4 100644
--- a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_gn/moz.build
@@ -204,7 +204,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -214,10 +213,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build
index 268854264f..e6c31b48b5 100644
--- a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_device/BUILD.gn b/third_party/libwebrtc/modules/audio_device/BUILD.gn
index 4726f93279..a135f042db 100644
--- a/third_party/libwebrtc/modules/audio_device/BUILD.gn
+++ b/third_party/libwebrtc/modules/audio_device/BUILD.gn
@@ -50,6 +50,7 @@ rtc_source_set("audio_device_api") {
"include/audio_device_defines.h",
]
deps = [
+ "../../api:ref_count",
"../../api:scoped_refptr",
"../../api/task_queue",
"../../rtc_base:checks",
@@ -490,7 +491,6 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) {
"../../common_audio",
"../../rtc_base:buffer",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:logging",
"../../rtc_base:macromagic",
"../../rtc_base:race_checker",
diff --git a/third_party/libwebrtc/modules/audio_device/audio_device_gn/moz.build b/third_party/libwebrtc/modules/audio_device/audio_device_gn/moz.build
index df00e056c6..4128efbbf8 100644
--- a/third_party/libwebrtc/modules/audio_device/audio_device_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_device/audio_device_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_device/include/audio_device.h b/third_party/libwebrtc/modules/audio_device/include/audio_device.h
index 936ee6cb04..47d2aecfa7 100644
--- a/third_party/libwebrtc/modules/audio_device/include/audio_device.h
+++ b/third_party/libwebrtc/modules/audio_device/include/audio_device.h
@@ -12,16 +12,16 @@
#define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
#include "absl/types/optional.h"
+#include "api/ref_count.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/task_queue_factory.h"
#include "modules/audio_device/include/audio_device_defines.h"
-#include "rtc_base/ref_count.h"
namespace webrtc {
class AudioDeviceModuleForTest;
-class AudioDeviceModule : public rtc::RefCountInterface {
+class AudioDeviceModule : public webrtc::RefCountInterface {
public:
enum AudioLayer {
kPlatformDefaultAudio = 0,
diff --git a/third_party/libwebrtc/modules/audio_device/include/fake_audio_device.h b/third_party/libwebrtc/modules/audio_device/include/fake_audio_device.h
index 2322ce0263..2a303173e9 100644
--- a/third_party/libwebrtc/modules/audio_device/include/fake_audio_device.h
+++ b/third_party/libwebrtc/modules/audio_device/include/fake_audio_device.h
@@ -23,8 +23,8 @@ class FakeAudioDeviceModule
// references using scoped_refptr. Current code doesn't always use refcounting
// for this class.
void AddRef() const override {}
- rtc::RefCountReleaseStatus Release() const override {
- return rtc::RefCountReleaseStatus::kDroppedLastRef;
+ webrtc::RefCountReleaseStatus Release() const override {
+ return webrtc::RefCountReleaseStatus::kDroppedLastRef;
}
};
diff --git a/third_party/libwebrtc/modules/audio_mixer/audio_frame_manipulator_gn/moz.build b/third_party/libwebrtc/modules/audio_mixer/audio_frame_manipulator_gn/moz.build
index edfac56a3a..cc60512cda 100644
--- a/third_party/libwebrtc/modules/audio_mixer/audio_frame_manipulator_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_mixer/audio_frame_manipulator_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_gn/moz.build b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_gn/moz.build
index 7108d9fbe1..6595939941 100644
--- a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/BUILD.gn b/third_party/libwebrtc/modules/audio_processing/BUILD.gn
index 2b81427da9..6aca7dee46 100644
--- a/third_party/libwebrtc/modules/audio_processing/BUILD.gn
+++ b/third_party/libwebrtc/modules/audio_processing/BUILD.gn
@@ -29,6 +29,7 @@ rtc_library("api") {
":audio_frame_view",
":audio_processing_statistics",
"../../api:array_view",
+ "../../api:ref_count",
"../../api:scoped_refptr",
"../../api/audio:aec3_config",
"../../api/audio:audio_frame_api",
@@ -190,7 +191,6 @@ rtc_library("audio_processing") {
"../../rtc_base:checks",
"../../rtc_base:event_tracer",
"../../rtc_base:gtest_prod",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:logging",
"../../rtc_base:macromagic",
"../../rtc_base:safe_minmax",
@@ -397,7 +397,6 @@ if (rtc_include_tests) {
"../../common_audio:common_audio_c",
"../../rtc_base:checks",
"../../rtc_base:gtest_prod",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:macromagic",
"../../rtc_base:platform_thread",
"../../rtc_base:protobuf_utils",
@@ -573,7 +572,6 @@ if (rtc_include_tests) {
"../../api/audio:echo_detector_creator",
"../../common_audio",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:logging",
"../../rtc_base:protobuf_utils",
"../../rtc_base:rtc_json",
@@ -613,7 +611,6 @@ if (rtc_include_tests) {
deps = [
":audioproc_debug_proto",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../rtc_base:protobuf_utils",
"../../rtc_base/system:arch",
]
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_gn/moz.build
index f21e65fb4a..7435b6a457 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_erl_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_gn/moz.build
index b9c819893f..0d2471073d 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/adaptive_fir_filter_gn/moz.build
@@ -191,16 +191,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn/moz.build
index 097e67bbe5..f9844b6521 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_avx2_gn/moz.build
@@ -181,10 +181,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86":
CXXFLAGS += [
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_common_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_common_gn/moz.build
index 955fe2022f..8d9c4e6bd7 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_common_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_common_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_fft_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_fft_gn/moz.build
index 154d9f4406..d403ae8b96 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_fft_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_fft_gn/moz.build
@@ -191,16 +191,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_gn/moz.build
index 7ad4cffedf..85e5654231 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/aec3_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/aec3_gn/moz.build
@@ -256,7 +256,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -266,10 +265,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/fft_data_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/fft_data_gn/moz.build
index 0084077435..aa1aaf15d9 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/fft_data_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/fft_data_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/matched_filter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/matched_filter_gn/moz.build
index be2c3bbf56..0ebdb0798f 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/matched_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/matched_filter_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/render_buffer_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/render_buffer_gn/moz.build
index 2bd3ae0c01..6444c3137f 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/render_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/render_buffer_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec3/vector_math_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec3/vector_math_gn/moz.build
index e40fdb1cf1..9cf3a7842a 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec3/vector_math_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec3/vector_math_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/BUILD.gn b/third_party/libwebrtc/modules/audio_processing/aec_dump/BUILD.gn
index 38d8776258..78bae56835 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump/BUILD.gn
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/BUILD.gn
@@ -66,7 +66,6 @@ if (rtc_enable_protobuf) {
"../../../api/audio:audio_frame_api",
"../../../api/task_queue",
"../../../rtc_base:checks",
- "../../../rtc_base:ignore_wundef",
"../../../rtc_base:logging",
"../../../rtc_base:macromagic",
"../../../rtc_base:protobuf_utils",
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build
index f1280fed0d..13420467de 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build
@@ -187,16 +187,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_impl.h b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_impl.h
index fac3712b7a..429808f9af 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_impl.h
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_impl.h
@@ -17,20 +17,17 @@
#include "modules/audio_processing/aec_dump/capture_stream_info.h"
#include "modules/audio_processing/include/aec_dump.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/race_checker.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/task_queue.h"
#include "rtc_base/thread_annotations.h"
// Files generated at build-time by the protobuf compiler.
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "modules/audio_processing/debug.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/capture_stream_info.h b/third_party/libwebrtc/modules/audio_processing/aec_dump/capture_stream_info.h
index 0819bbcb23..572990c150 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump/capture_stream_info.h
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/capture_stream_info.h
@@ -15,16 +15,13 @@
#include <utility>
#include "modules/audio_processing/include/aec_dump.h"
-#include "rtc_base/ignore_wundef.h"
// Files generated at build-time by the protobuf compiler.
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "modules/audio_processing/debug.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build
index 2966151ab6..4e8b16442d 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build
index 02b847ed76..89ae508073 100644
--- a/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/aecm/aecm_core_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aecm/aecm_core_gn/moz.build
index 9ff36991fe..ca3ffeb81e 100644
--- a/third_party/libwebrtc/modules/audio_processing/aecm/aecm_core_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/aecm/aecm_core_gn/moz.build
@@ -238,7 +238,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -248,10 +247,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc/agc_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc/agc_gn/moz.build
index f6f4442cfc..f26489f413 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc/agc_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc/agc_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn/moz.build
index be6b4f9b27..ebf241f7a5 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc/gain_control_interface_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn/moz.build
index 4e6e295d34..aa5c6835cc 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc/legacy_agc_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn/moz.build
index 64ffa75960..a272555662 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc/level_estimation_gn/moz.build
@@ -201,7 +201,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -211,10 +210,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build
index 9473ac62f5..5e3b5801ad 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/adaptive_digital_gain_controller_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn/moz.build
index c7a2f6d215..d9520efe2f 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/biquad_filter_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn/moz.build
index e70e3f68e9..dbf53e8e8e 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/common_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/common_gn/moz.build
index 8690613542..44307f4147 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/common_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/common_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/cpu_features_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/cpu_features_gn/moz.build
index 4b0431db1a..e842cac9c3 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/cpu_features_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/cpu_features_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/fixed_digital_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/fixed_digital_gn/moz.build
index 1b8da82f58..60614d4cc1 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/fixed_digital_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/fixed_digital_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/gain_applier_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/gain_applier_gn/moz.build
index bea71dcee3..691900e356 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/gain_applier_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/gain_applier_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_gn/moz.build
index 03eb1fb3a1..ee04e973fb 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/gain_map_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_gn/moz.build
index f1a841d5ae..0bde4db9d4 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_controller_gn/moz.build
@@ -201,7 +201,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -211,10 +210,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build
index 40448f68a9..b7d0a9ba88 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator_gn/moz.build
index 9d4629e9ab..210539ab46 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/noise_level_estimator_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_auto_correlation_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_auto_correlation_gn/moz.build
index 134ffac5fd..7965a026ef 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_auto_correlation_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_auto_correlation_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_common_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_common_gn/moz.build
index cf3de48a57..bdfe90cf16 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_common_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_common_gn/moz.build
@@ -191,16 +191,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_gn/moz.build
index dbb926c5fc..6a73ce96e4 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_layers_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_layers_gn/moz.build
index 92da260f90..27b40f13a5 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_layers_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_layers_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_lp_residual_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_lp_residual_gn/moz.build
index cedb17bc22..d66ed412b2 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_lp_residual_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_lp_residual_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_pitch_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_pitch_gn/moz.build
index d45bc78ff6..0ddc85f5ac 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_pitch_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_pitch_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_ring_buffer_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_ring_buffer_gn/moz.build
index 20da5f3615..25e813a226 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_ring_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_ring_buffer_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_sequence_buffer_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_sequence_buffer_gn/moz.build
index b0ba79562e..f54dd88a23 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_sequence_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_sequence_buffer_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn/moz.build
index 2d8396fa2a..d8b88047d7 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_spectral_features_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn/moz.build
index 143ba6960c..4a3c5bf28b 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_vad_symmetric_matrix_buffer_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn/moz.build
index d4dd169f15..01313fa460 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_avx2_gn/moz.build
@@ -176,10 +176,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "x86":
CXXFLAGS += [
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn/moz.build
index 09fe0c3d24..3f88913309 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/vector_math_gn/moz.build
@@ -191,16 +191,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build
index 6b8def8650..6562d840b7 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/saturation_protector_gn/moz.build
@@ -201,7 +201,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -211,10 +210,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn/moz.build
index 8f2996fa26..3afaa88450 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/speech_level_estimator_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn/moz.build
index 55cfbb60e7..3aa09832b2 100644
--- a/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/vad_wrapper_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build
index 7a02b7e10c..37e50af014 100644
--- a/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/apm_logging_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/apm_logging_gn/moz.build
index 992376cd8a..53fd9d9f94 100644
--- a/third_party/libwebrtc/modules/audio_processing/apm_logging_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/apm_logging_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_buffer_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_buffer_gn/moz.build
index 88031a747d..2087aeb909 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/audio_buffer_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build
index 7e73b70483..737ca5e834 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_frame_view_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_frame_view_gn/moz.build
index 0f81755091..b7391a78b1 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_frame_view_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/audio_frame_view_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_processing_gn/moz.build
index 7dc22bcf2b..5b4f4d5d54 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_gn/moz.build
@@ -206,7 +206,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -216,10 +215,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h
index fe80e0d912..1e058b5a32 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.h
@@ -43,7 +43,6 @@
#include "modules/audio_processing/rms_level.h"
#include "modules/audio_processing/transient/transient_suppressor.h"
#include "rtc_base/gtest_prod_util.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/swap_queue.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc
index 9e50f994b1..e03f966b06 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -48,7 +48,7 @@ class MockInitialize : public AudioProcessingImpl {
}
MOCK_METHOD(void, AddRef, (), (const, override));
- MOCK_METHOD(rtc::RefCountReleaseStatus, Release, (), (const, override));
+ MOCK_METHOD(RefCountReleaseStatus, Release, (), (const, override));
};
// Creates MockEchoControl instances and provides a raw pointer access to
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_statistics_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_processing_statistics_gn/moz.build
index 6d174505ed..6b3e54c3f7 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_statistics_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_statistics_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc
index e320e71405..c2bedb2da4 100644
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_unittest.cc
@@ -38,7 +38,6 @@
#include "rtc_base/checks.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/gtest_prod_util.h"
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/numerics/safe_minmax.h"
#include "rtc_base/protobuf_utils.h"
@@ -51,14 +50,13 @@
#include "test/gtest.h"
#include "test/testsupport/file_utils.h"
-RTC_PUSH_IGNORING_WUNDEF()
-#include "modules/audio_processing/debug.pb.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#include "external/webrtc/webrtc/modules/audio_processing/test/unittest.pb.h"
#else
+#include "modules/audio_processing/debug.pb.h"
#include "modules/audio_processing/test/unittest.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
ABSL_FLAG(bool,
write_apm_ref_data,
diff --git a/third_party/libwebrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster_gn/moz.build
index d80a3bb1c6..ad198344e2 100644
--- a/third_party/libwebrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/gain_controller2_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/gain_controller2_gn/moz.build
index d6d9d3658b..ab31e68564 100644
--- a/third_party/libwebrtc/modules/audio_processing/gain_controller2_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/gain_controller2_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/high_pass_filter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/high_pass_filter_gn/moz.build
index 8769a3a318..af06d4142f 100644
--- a/third_party/libwebrtc/modules/audio_processing/high_pass_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/high_pass_filter_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h
index f613a38de1..e3223513af 100644
--- a/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h
+++ b/third_party/libwebrtc/modules/audio_processing/include/audio_processing.h
@@ -28,10 +28,10 @@
#include "api/array_view.h"
#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_control.h"
+#include "api/ref_count.h"
#include "api/scoped_refptr.h"
#include "modules/audio_processing/include/audio_processing_statistics.h"
#include "rtc_base/arraysize.h"
-#include "rtc_base/ref_count.h"
#include "rtc_base/system/file_wrapper.h"
#include "rtc_base/system/rtc_export.h"
@@ -127,7 +127,7 @@ class CustomProcessing;
// // Close the application...
// apm.reset();
//
-class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface {
+class RTC_EXPORT AudioProcessing : public RefCountInterface {
public:
// The struct below constitutes the new parameter scheme for the audio
// processing. It is being introduced gradually and until it is fully
@@ -912,7 +912,7 @@ class CustomProcessing {
};
// Interface for an echo detector submodule.
-class EchoDetector : public rtc::RefCountInterface {
+class EchoDetector : public RefCountInterface {
public:
// (Re-)Initializes the submodule.
virtual void Initialize(int capture_sample_rate_hz,
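With this change AudioProcessing and EchoDetector derive from webrtc::RefCountInterface out of api/ref_count.h instead of rtc::RefCountInterface from rtc_base/ref_count.h. Below is a minimal sketch of an implementer of the relocated interface, assuming api/ref_count.h keeps the AddRef()/Release() signatures shown in the MockInitialize hunk above; NullRefCounted is a hypothetical name and the hand-rolled counter is not thread safe (real code would normally reuse rtc::RefCountedObject or rtc::make_ref_counted).

    #include "api/ref_count.h"

    namespace webrtc {

    // Hypothetical illustration only: a type satisfying the relocated
    // RefCountInterface with a plain (non-atomic) counter.
    class NullRefCounted : public RefCountInterface {
     public:
      void AddRef() const override { ++ref_count_; }
      RefCountReleaseStatus Release() const override {
        if (--ref_count_ == 0) {
          delete this;
          return RefCountReleaseStatus::kDroppedLastRef;
        }
        return RefCountReleaseStatus::kOtherRefsRemained;
      }

     private:
      ~NullRefCounted() override = default;
      mutable int ref_count_ = 0;
    };

    }  // namespace webrtc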
diff --git a/third_party/libwebrtc/modules/audio_processing/ns/ns_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/ns/ns_gn/moz.build
index 14595abaf9..ac1c19134a 100644
--- a/third_party/libwebrtc/modules/audio_processing/ns/ns_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/ns/ns_gn/moz.build
@@ -212,7 +212,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -222,10 +221,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/optionally_built_submodule_creators_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/optionally_built_submodule_creators_gn/moz.build
index af79a781f6..da2a1b7ae3 100644
--- a/third_party/libwebrtc/modules/audio_processing/optionally_built_submodule_creators_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/optionally_built_submodule_creators_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/rms_level_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/rms_level_gn/moz.build
index 23f52652ae..d0c4b2bd8e 100644
--- a/third_party/libwebrtc/modules/audio_processing/rms_level_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/rms_level_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/test/aec_dump_based_simulator.h b/third_party/libwebrtc/modules/audio_processing/test/aec_dump_based_simulator.h
index e2c1f3e4ba..4713c800ec 100644
--- a/third_party/libwebrtc/modules/audio_processing/test/aec_dump_based_simulator.h
+++ b/third_party/libwebrtc/modules/audio_processing/test/aec_dump_based_simulator.h
@@ -15,15 +15,12 @@
#include <string>
#include "modules/audio_processing/test/audio_processing_simulator.h"
-#include "rtc_base/ignore_wundef.h"
-RTC_PUSH_IGNORING_WUNDEF()
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
#else
#include "modules/audio_processing/debug.pb.h"
#endif
-RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
namespace test {
diff --git a/third_party/libwebrtc/modules/audio_processing/test/debug_dump_replayer.h b/third_party/libwebrtc/modules/audio_processing/test/debug_dump_replayer.h
index be21c68663..077147eb68 100644
--- a/third_party/libwebrtc/modules/audio_processing/test/debug_dump_replayer.h
+++ b/third_party/libwebrtc/modules/audio_processing/test/debug_dump_replayer.h
@@ -16,11 +16,9 @@
#include "absl/strings/string_view.h"
#include "common_audio/channel_buffer.h"
#include "modules/audio_processing/include/audio_processing.h"
-#include "rtc_base/ignore_wundef.h"
-RTC_PUSH_IGNORING_WUNDEF()
+// Generated at build-time by the protobuf compiler.
#include "modules/audio_processing/debug.pb.h"
-RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
namespace test {
diff --git a/third_party/libwebrtc/modules/audio_processing/test/protobuf_utils.h b/third_party/libwebrtc/modules/audio_processing/test/protobuf_utils.h
index b9c2e819f9..eb93383f5a 100644
--- a/third_party/libwebrtc/modules/audio_processing/test/protobuf_utils.h
+++ b/third_party/libwebrtc/modules/audio_processing/test/protobuf_utils.h
@@ -14,12 +14,10 @@
#include <memory>
#include <sstream> // no-presubmit-check TODO(webrtc:8982)
-#include "rtc_base/ignore_wundef.h"
#include "rtc_base/protobuf_utils.h"
-RTC_PUSH_IGNORING_WUNDEF()
+// Generated at build-time by the protobuf compiler.
#include "modules/audio_processing/debug.pb.h"
-RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
diff --git a/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_api_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_api_gn/moz.build
index 6310b948ac..44571715b8 100644
--- a/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_api_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_api_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_impl_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_impl_gn/moz.build
index 31e0736f30..d700fc1a32 100644
--- a/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_impl_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/transient/transient_suppressor_impl_gn/moz.build
@@ -203,7 +203,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -213,10 +212,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/transient/voice_probability_delay_unit_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/transient/voice_probability_delay_unit_gn/moz.build
index 52587c0890..c67675f431 100644
--- a/third_party/libwebrtc/modules/audio_processing/transient/voice_probability_delay_unit_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/transient/voice_probability_delay_unit_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/utility/cascaded_biquad_filter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/utility/cascaded_biquad_filter_gn/moz.build
index 02813d2513..b6566a8950 100644
--- a/third_party/libwebrtc/modules/audio_processing/utility/cascaded_biquad_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/utility/cascaded_biquad_filter_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/utility/legacy_delay_estimator_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/utility/legacy_delay_estimator_gn/moz.build
index 67c6a218f6..c20d5b6189 100644
--- a/third_party/libwebrtc/modules/audio_processing/utility/legacy_delay_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/utility/legacy_delay_estimator_gn/moz.build
@@ -189,7 +189,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -199,10 +198,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/utility/pffft_wrapper_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/utility/pffft_wrapper_gn/moz.build
index 3213706005..936decab70 100644
--- a/third_party/libwebrtc/modules/audio_processing/utility/pffft_wrapper_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/utility/pffft_wrapper_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/audio_processing/vad/vad_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/vad/vad_gn/moz.build
index 3739bbef8a..0ae31f5a2e 100644
--- a/third_party/libwebrtc/modules/audio_processing/vad/vad_gn/moz.build
+++ b/third_party/libwebrtc/modules/audio_processing/vad/vad_gn/moz.build
@@ -206,7 +206,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -216,10 +215,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/congestion_controller_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/congestion_controller_gn/moz.build
index b5bcafa45f..1190193b94 100644
--- a/third_party/libwebrtc/modules/congestion_controller/congestion_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/congestion_controller_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build
index b48fc38c39..40fd1189aa 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe_gn/moz.build
index 31d8c420f6..e2087c6126 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/delay_based_bwe_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/estimators_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/estimators_gn/moz.build
index e233806b43..7b77d3dc86 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/estimators_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/estimators_gn/moz.build
@@ -205,7 +205,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -215,10 +214,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_gn/moz.build
index 147a08113b..0e5182a469 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_gn/moz.build
@@ -201,7 +201,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -211,10 +210,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/link_capacity_estimator_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/link_capacity_estimator_gn/moz.build
index 0ee8a34df8..04b78b5988 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/link_capacity_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/link_capacity_estimator_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build
index 5931292efe..d290fbe9ec 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build
@@ -196,7 +196,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -206,10 +205,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc
index ef200869a6..8e1a3c4698 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc
@@ -132,7 +132,7 @@ LossBasedBweV2::LossBasedBweV2(const FieldTrialsView* key_value_config)
instant_upper_bound_temporal_weights_.resize(
config_->observation_window_size);
CalculateTemporalWeights();
- hold_duration_ = kInitHoldDuration;
+ last_hold_info_.duration = kInitHoldDuration;
}
bool LossBasedBweV2::IsEnabled() const {
@@ -149,6 +149,10 @@ bool LossBasedBweV2::ReadyToUseInStartPhase() const {
return IsReady() && config_->use_in_start_phase;
}
+bool LossBasedBweV2::UseInStartPhase() const {
+ return config_->use_in_start_phase;
+}
+
LossBasedBweV2::Result LossBasedBweV2::GetLossBasedResult() const {
if (!IsReady()) {
if (!IsEnabled()) {
@@ -289,50 +293,69 @@ void LossBasedBweV2::UpdateBandwidthEstimate(
/*new_estimate=*/best_candidate.loss_limited_bandwidth);
// Bound the best candidate by the acked bitrate.
if (increasing_when_loss_limited && IsValid(acknowledged_bitrate_)) {
+ double rampup_factor = config_->bandwidth_rampup_upper_bound_factor;
+ if (IsValid(last_hold_info_.rate) &&
+ acknowledged_bitrate_ <
+ config_->bandwidth_rampup_hold_threshold * last_hold_info_.rate) {
+ rampup_factor = config_->bandwidth_rampup_upper_bound_factor_in_hold;
+ }
+
best_candidate.loss_limited_bandwidth =
std::max(current_best_estimate_.loss_limited_bandwidth,
std::min(best_candidate.loss_limited_bandwidth,
- config_->bandwidth_rampup_upper_bound_factor *
- (*acknowledged_bitrate_)));
+ rampup_factor * (*acknowledged_bitrate_)));
+ // Increase current estimate by at least 1kbps to make sure that the state
+ // will be switched to kIncreasing, thus padding is triggered.
+ if (loss_based_result_.state == LossBasedState::kDecreasing &&
+ best_candidate.loss_limited_bandwidth ==
+ current_best_estimate_.loss_limited_bandwidth) {
+ best_candidate.loss_limited_bandwidth =
+ current_best_estimate_.loss_limited_bandwidth +
+ DataRate::BitsPerSec(1);
+ }
}
}
- current_best_estimate_ = best_candidate;
- UpdateResult();
-
- if (IsInLossLimitedState() &&
- (recovering_after_loss_timestamp_.IsInfinite() ||
- recovering_after_loss_timestamp_ + config_->delayed_increase_window <
- last_send_time_most_recent_observation_)) {
- bandwidth_limit_in_current_window_ =
- std::max(kCongestionControllerMinBitrate,
- current_best_estimate_.loss_limited_bandwidth *
- config_->max_increase_factor);
- recovering_after_loss_timestamp_ = last_send_time_most_recent_observation_;
- }
-}
-
-void LossBasedBweV2::UpdateResult() {
DataRate bounded_bandwidth_estimate = DataRate::PlusInfinity();
if (IsValid(delay_based_estimate_)) {
bounded_bandwidth_estimate =
std::max(GetInstantLowerBound(),
- std::min({current_best_estimate_.loss_limited_bandwidth,
+ std::min({best_candidate.loss_limited_bandwidth,
GetInstantUpperBound(), delay_based_estimate_}));
} else {
- bounded_bandwidth_estimate =
- std::max(GetInstantLowerBound(),
- std::min(current_best_estimate_.loss_limited_bandwidth,
- GetInstantUpperBound()));
+ bounded_bandwidth_estimate = std::max(
+ GetInstantLowerBound(), std::min(best_candidate.loss_limited_bandwidth,
+ GetInstantUpperBound()));
+ }
+ if (config_->bound_best_candidate &&
+ bounded_bandwidth_estimate < best_candidate.loss_limited_bandwidth) {
+ RTC_LOG(LS_INFO) << "Resetting loss based BWE to "
+ << bounded_bandwidth_estimate.kbps()
+ << "due to loss. Avg loss rate: "
+ << GetAverageReportedLossRatio();
+ current_best_estimate_.loss_limited_bandwidth = bounded_bandwidth_estimate;
+ current_best_estimate_.inherent_loss = 0;
+ } else {
+ current_best_estimate_ = best_candidate;
+ if (config_->lower_bound_by_acked_rate_factor > 0.0) {
+ current_best_estimate_.loss_limited_bandwidth =
+ std::max(current_best_estimate_.loss_limited_bandwidth,
+ GetInstantLowerBound());
+ }
}
if (loss_based_result_.state == LossBasedState::kDecreasing &&
- last_hold_timestamp_ > last_send_time_most_recent_observation_ &&
+ last_hold_info_.timestamp > last_send_time_most_recent_observation_ &&
bounded_bandwidth_estimate < delay_based_estimate_) {
- // BWE is not allowed to increase during the HOLD duration. The purpose of
+ // Ensure that acked rate is the lower bound of HOLD rate.
+ if (config_->lower_bound_by_acked_rate_factor > 0.0) {
+ last_hold_info_.rate =
+ std::max(GetInstantLowerBound(), last_hold_info_.rate);
+ }
+ // BWE is not allowed to increase above the HOLD rate. The purpose of
// HOLD is to not immediately ramp up BWE to a rate that may cause loss.
- loss_based_result_.bandwidth_estimate = std::min(
- loss_based_result_.bandwidth_estimate, bounded_bandwidth_estimate);
+ loss_based_result_.bandwidth_estimate =
+ std::min(last_hold_info_.rate, bounded_bandwidth_estimate);
return;
}
@@ -359,22 +382,38 @@ void LossBasedBweV2::UpdateResult() {
RTC_LOG(LS_INFO) << this << " "
<< "Switch to HOLD. Bounded BWE: "
<< bounded_bandwidth_estimate.kbps()
- << ", duration: " << hold_duration_.seconds();
- last_hold_timestamp_ =
- last_send_time_most_recent_observation_ + hold_duration_;
- hold_duration_ = std::min(kMaxHoldDuration,
- hold_duration_ * config_->hold_duration_factor);
+ << ", duration: " << last_hold_info_.duration.ms();
+ last_hold_info_ = {
+ .timestamp = last_send_time_most_recent_observation_ +
+ last_hold_info_.duration,
+ .duration =
+ std::min(kMaxHoldDuration, last_hold_info_.duration *
+ config_->hold_duration_factor),
+ .rate = bounded_bandwidth_estimate};
}
last_padding_info_ = PaddingInfo();
loss_based_result_.state = LossBasedState::kDecreasing;
} else {
- // Reset the HOLD duration if delay based estimate works to avoid getting
+ // Reset the HOLD info if delay based estimate works to avoid getting
// stuck in low bitrate.
- hold_duration_ = kInitHoldDuration;
+ last_hold_info_ = {.timestamp = Timestamp::MinusInfinity(),
+ .duration = kInitHoldDuration,
+ .rate = DataRate::PlusInfinity()};
last_padding_info_ = PaddingInfo();
loss_based_result_.state = LossBasedState::kDelayBasedEstimate;
}
loss_based_result_.bandwidth_estimate = bounded_bandwidth_estimate;
+
+ if (IsInLossLimitedState() &&
+ (recovering_after_loss_timestamp_.IsInfinite() ||
+ recovering_after_loss_timestamp_ + config_->delayed_increase_window <
+ last_send_time_most_recent_observation_)) {
+ bandwidth_limit_in_current_window_ =
+ std::max(kCongestionControllerMinBitrate,
+ current_best_estimate_.loss_limited_bandwidth *
+ config_->max_increase_factor);
+ recovering_after_loss_timestamp_ = last_send_time_most_recent_observation_;
+ }
}
bool LossBasedBweV2::IsEstimateIncreasingWhenLossLimited(
@@ -394,6 +433,10 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
FieldTrialParameter<bool> enabled("Enabled", true);
FieldTrialParameter<double> bandwidth_rampup_upper_bound_factor(
"BwRampupUpperBoundFactor", 1000000.0);
+ FieldTrialParameter<double> bandwidth_rampup_upper_bound_factor_in_hold(
+ "BwRampupUpperBoundInHoldFactor", 1000000.0);
+ FieldTrialParameter<double> bandwidth_rampup_hold_threshold(
+ "BwRampupUpperBoundHoldThreshold", 1.3);
FieldTrialParameter<double> rampup_acceleration_max_factor(
"BwRampupAccelMaxFactor", 0.0);
FieldTrialParameter<TimeDelta> rampup_acceleration_maxout_time(
@@ -445,12 +488,6 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
FieldTrialParameter<bool>
not_increase_if_inherent_loss_less_than_average_loss(
"NotIncreaseIfInherentLossLessThanAverageLoss", true);
- FieldTrialParameter<double> high_loss_rate_threshold("HighLossRateThreshold",
- 1.0);
- FieldTrialParameter<DataRate> bandwidth_cap_at_high_loss_rate(
- "BandwidthCapAtHighLossRate", DataRate::KilobitsPerSec(500.0));
- FieldTrialParameter<double> slope_of_bwe_high_loss_func(
- "SlopeOfBweHighLossFunc", 1000);
FieldTrialParameter<bool> not_use_acked_rate_in_alr("NotUseAckedRateInAlr",
true);
FieldTrialParameter<bool> use_in_start_phase("UseInStartPhase", false);
@@ -461,9 +498,12 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
FieldTrialParameter<bool> use_byte_loss_rate("UseByteLossRate", false);
FieldTrialParameter<TimeDelta> padding_duration("PaddingDuration",
TimeDelta::Zero());
+ FieldTrialParameter<bool> bound_best_candidate("BoundBestCandidate", false);
if (key_value_config) {
ParseFieldTrial({&enabled,
&bandwidth_rampup_upper_bound_factor,
+ &bandwidth_rampup_upper_bound_factor_in_hold,
+ &bandwidth_rampup_hold_threshold,
&rampup_acceleration_max_factor,
&rampup_acceleration_maxout_time,
&candidate_factors,
@@ -491,16 +531,14 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
&max_increase_factor,
&delayed_increase_window,
&not_increase_if_inherent_loss_less_than_average_loss,
- &high_loss_rate_threshold,
- &bandwidth_cap_at_high_loss_rate,
- &slope_of_bwe_high_loss_func,
&not_use_acked_rate_in_alr,
&use_in_start_phase,
&min_num_observations,
&lower_bound_by_acked_rate_factor,
&hold_duration_factor,
&use_byte_loss_rate,
- &padding_duration},
+ &padding_duration,
+ &bound_best_candidate},
key_value_config->Lookup("WebRTC-Bwe-LossBasedBweV2"));
}
@@ -511,6 +549,10 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
config.emplace(Config());
config->bandwidth_rampup_upper_bound_factor =
bandwidth_rampup_upper_bound_factor.Get();
+ config->bandwidth_rampup_upper_bound_factor_in_hold =
+ bandwidth_rampup_upper_bound_factor_in_hold.Get();
+ config->bandwidth_rampup_hold_threshold =
+ bandwidth_rampup_hold_threshold.Get();
config->rampup_acceleration_max_factor = rampup_acceleration_max_factor.Get();
config->rampup_acceleration_maxout_time =
rampup_acceleration_maxout_time.Get();
@@ -553,10 +595,6 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
config->delayed_increase_window = delayed_increase_window.Get();
config->not_increase_if_inherent_loss_less_than_average_loss =
not_increase_if_inherent_loss_less_than_average_loss.Get();
- config->high_loss_rate_threshold = high_loss_rate_threshold.Get();
- config->bandwidth_cap_at_high_loss_rate =
- bandwidth_cap_at_high_loss_rate.Get();
- config->slope_of_bwe_high_loss_func = slope_of_bwe_high_loss_func.Get();
config->not_use_acked_rate_in_alr = not_use_acked_rate_in_alr.Get();
config->use_in_start_phase = use_in_start_phase.Get();
config->min_num_observations = min_num_observations.Get();
@@ -565,7 +603,7 @@ absl::optional<LossBasedBweV2::Config> LossBasedBweV2::CreateConfig(
config->hold_duration_factor = hold_duration_factor.Get();
config->use_byte_loss_rate = use_byte_loss_rate.Get();
config->padding_duration = padding_duration.Get();
-
+ config->bound_best_candidate = bound_best_candidate.Get();
return config;
}
@@ -582,6 +620,18 @@ bool LossBasedBweV2::IsConfigValid() const {
<< config_->bandwidth_rampup_upper_bound_factor;
valid = false;
}
+ if (config_->bandwidth_rampup_upper_bound_factor_in_hold <= 1.0) {
+ RTC_LOG(LS_WARNING) << "The bandwidth rampup upper bound factor in hold "
+ "must be greater than 1: "
+ << config_->bandwidth_rampup_upper_bound_factor_in_hold;
+ valid = false;
+ }
+ if (config_->bandwidth_rampup_hold_threshold < 0.0) {
+ RTC_LOG(LS_WARNING) << "The bandwidth rampup hold threshold must"
+ "must be non-negative.: "
+ << config_->bandwidth_rampup_hold_threshold;
+ valid = false;
+ }
if (config_->rampup_acceleration_max_factor < 0.0) {
RTC_LOG(LS_WARNING)
<< "The rampup acceleration max factor must be non-negative.: "
@@ -739,12 +789,6 @@ bool LossBasedBweV2::IsConfigValid() const {
<< config_->delayed_increase_window.ms();
valid = false;
}
- if (config_->high_loss_rate_threshold <= 0.0 ||
- config_->high_loss_rate_threshold > 1.0) {
- RTC_LOG(LS_WARNING) << "The high loss rate threshold must be in (0, 1]: "
- << config_->high_loss_rate_threshold;
- valid = false;
- }
if (config_->min_num_observations <= 0) {
RTC_LOG(LS_WARNING) << "The min number of observations must be positive: "
<< config_->min_num_observations;
@@ -834,15 +878,19 @@ DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const {
std::vector<LossBasedBweV2::ChannelParameters> LossBasedBweV2::GetCandidates(
bool in_alr) const {
+ ChannelParameters best_estimate = current_best_estimate_;
std::vector<DataRate> bandwidths;
for (double candidate_factor : config_->candidate_factors) {
bandwidths.push_back(candidate_factor *
- current_best_estimate_.loss_limited_bandwidth);
+ best_estimate.loss_limited_bandwidth);
}
if (acknowledged_bitrate_.has_value() &&
config_->append_acknowledged_rate_candidate) {
- if (!(config_->not_use_acked_rate_in_alr && in_alr)) {
+ if (!(config_->not_use_acked_rate_in_alr && in_alr) ||
+ (config_->padding_duration > TimeDelta::Zero() &&
+ last_padding_info_.padding_timestamp + config_->padding_duration >=
+ last_send_time_most_recent_observation_)) {
bandwidths.push_back(*acknowledged_bitrate_ *
config_->bandwidth_backoff_lower_bound_factor);
}
@@ -850,13 +898,13 @@ std::vector<LossBasedBweV2::ChannelParameters> LossBasedBweV2::GetCandidates(
if (IsValid(delay_based_estimate_) &&
config_->append_delay_based_estimate_candidate) {
- if (delay_based_estimate_ > current_best_estimate_.loss_limited_bandwidth) {
+ if (delay_based_estimate_ > best_estimate.loss_limited_bandwidth) {
bandwidths.push_back(delay_based_estimate_);
}
}
if (in_alr && config_->append_upper_bound_candidate_in_alr &&
- current_best_estimate_.loss_limited_bandwidth > GetInstantUpperBound()) {
+ best_estimate.loss_limited_bandwidth > GetInstantUpperBound()) {
bandwidths.push_back(GetInstantUpperBound());
}
@@ -866,10 +914,10 @@ std::vector<LossBasedBweV2::ChannelParameters> LossBasedBweV2::GetCandidates(
std::vector<ChannelParameters> candidates;
candidates.resize(bandwidths.size());
for (size_t i = 0; i < bandwidths.size(); ++i) {
- ChannelParameters candidate = current_best_estimate_;
- candidate.loss_limited_bandwidth = std::min(
- bandwidths[i], std::max(current_best_estimate_.loss_limited_bandwidth,
- candidate_bandwidth_upper_bound));
+ ChannelParameters candidate = best_estimate;
+ candidate.loss_limited_bandwidth =
+ std::min(bandwidths[i], std::max(best_estimate.loss_limited_bandwidth,
+ candidate_bandwidth_upper_bound));
candidate.inherent_loss = GetFeasibleInherentLoss(candidate);
candidates[i] = candidate;
}
@@ -1037,14 +1085,6 @@ void LossBasedBweV2::CalculateInstantUpperBound() {
instant_limit = config_->instant_upper_bound_bandwidth_balance /
(average_reported_loss_ratio -
config_->instant_upper_bound_loss_offset);
- if (average_reported_loss_ratio > config_->high_loss_rate_threshold) {
- instant_limit = std::min(
- instant_limit, DataRate::KilobitsPerSec(std::max(
- static_cast<double>(min_bitrate_.kbps()),
- config_->bandwidth_cap_at_high_loss_rate.kbps() -
- config_->slope_of_bwe_high_loss_func *
- average_reported_loss_ratio)));
- }
}
cached_instant_upper_bound_ = instant_limit;
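The UpdateBandwidthEstimate() hunks above bound the best candidate by the acknowledged bitrate with a separate rampup factor while the acked rate has not yet recovered past the rate remembered at the last HOLD, and add 1 kbps when the bounded candidate would otherwise equal the current estimate so the state can still move to kIncreasing. A minimal standalone sketch of that bound, using plain kbps doubles and hypothetical names (RampupConfig, BoundCandidateByAckedRate); the real code operates on webrtc::DataRate and the LossBasedBweV2::Config fields.

    #include <algorithm>

    // Hypothetical mirror of the three config knobs involved.
    struct RampupConfig {
      double upper_bound_factor = 1000000.0;          // BwRampupUpperBoundFactor
      double upper_bound_factor_in_hold = 1000000.0;  // BwRampupUpperBoundInHoldFactor
      double hold_threshold = 1.3;                    // BwRampupUpperBoundHoldThreshold
    };

    // Bounds the candidate by acked_kbps * rampup_factor, choosing the tighter
    // in-hold factor while the acked rate is still below hold_threshold times
    // the rate recorded at the last HOLD.
    double BoundCandidateByAckedRate(const RampupConfig& config,
                                     double current_kbps,
                                     double candidate_kbps,
                                     double acked_kbps,
                                     double last_hold_rate_kbps) {
      double rampup_factor = config.upper_bound_factor;
      if (acked_kbps < config.hold_threshold * last_hold_rate_kbps) {
        rampup_factor = config.upper_bound_factor_in_hold;
      }
      return std::max(current_kbps,
                      std::min(candidate_kbps, rampup_factor * acked_kbps));
    }

With the 1000000.0 defaults the bound is effectively inactive; the new EnsureIncreaseEvenIfAckedBitrateBound test later in this diff tightens BwRampupUpperBoundFactor to 1.2 to exercise it.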
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h
index 425ca2a0c8..9afbb11f1f 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h
@@ -62,6 +62,9 @@ class LossBasedBweV2 {
// Returns true if loss based BWE is ready to be used in the start phase.
bool ReadyToUseInStartPhase() const;
+ // Returns true if loss based BWE can be used in the start phase.
+ bool UseInStartPhase() const;
+
// Returns `DataRate::PlusInfinity` if no BWE can be calculated.
Result GetLossBasedResult() const;
@@ -83,6 +86,8 @@ class LossBasedBweV2 {
struct Config {
double bandwidth_rampup_upper_bound_factor = 0.0;
+ double bandwidth_rampup_upper_bound_factor_in_hold = 0;
+ double bandwidth_rampup_hold_threshold = 0;
double rampup_acceleration_max_factor = 0.0;
TimeDelta rampup_acceleration_maxout_time = TimeDelta::Zero();
std::vector<double> candidate_factors;
@@ -111,9 +116,6 @@ class LossBasedBweV2 {
double max_increase_factor = 0.0;
TimeDelta delayed_increase_window = TimeDelta::Zero();
bool not_increase_if_inherent_loss_less_than_average_loss = false;
- double high_loss_rate_threshold = 1.0;
- DataRate bandwidth_cap_at_high_loss_rate = DataRate::MinusInfinity();
- double slope_of_bwe_high_loss_func = 1000.0;
bool not_use_acked_rate_in_alr = false;
bool use_in_start_phase = false;
int min_num_observations = 0;
@@ -121,6 +123,7 @@ class LossBasedBweV2 {
double hold_duration_factor = 0.0;
bool use_byte_loss_rate = false;
TimeDelta padding_duration = TimeDelta::Zero();
+ bool bound_best_candidate = false;
};
struct Derivatives {
@@ -152,6 +155,12 @@ class LossBasedBweV2 {
Timestamp padding_timestamp = Timestamp::MinusInfinity();
};
+ struct HoldInfo {
+ Timestamp timestamp = Timestamp::MinusInfinity();
+ TimeDelta duration = TimeDelta::Zero();
+ DataRate rate = DataRate::PlusInfinity();
+ };
+
static absl::optional<Config> CreateConfig(
const FieldTrialsView* key_value_config);
bool IsConfigValid() const;
@@ -180,7 +189,6 @@ class LossBasedBweV2 {
// Returns false if no observation was created.
bool PushBackObservation(rtc::ArrayView<const PacketResult> packet_results);
- void UpdateResult();
bool IsEstimateIncreasingWhenLossLimited(DataRate old_estimate,
DataRate new_estimate);
bool IsInLossLimitedState() const;
@@ -204,8 +212,7 @@ class LossBasedBweV2 {
DataRate max_bitrate_ = DataRate::PlusInfinity();
DataRate delay_based_estimate_ = DataRate::PlusInfinity();
LossBasedBweV2::Result loss_based_result_ = LossBasedBweV2::Result();
- Timestamp last_hold_timestamp_ = Timestamp::MinusInfinity();
- TimeDelta hold_duration_ = TimeDelta::Zero();
+ HoldInfo last_hold_info_ = HoldInfo();
PaddingInfo last_padding_info_ = PaddingInfo();
};
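The header change folds the previous last_hold_timestamp_/hold_duration_ pair plus the new HOLD rate into a single HoldInfo value. A small sketch of how such a record is updated when entering HOLD, assuming the webrtc unit types and the designated-initializer style used in the .cc hunk; EnterHold and max_duration are illustrative names, not part of the patch.

    #include <algorithm>

    #include "api/units/data_rate.h"
    #include "api/units/time_delta.h"
    #include "api/units/timestamp.h"

    struct HoldInfo {
      webrtc::Timestamp timestamp = webrtc::Timestamp::MinusInfinity();
      webrtc::TimeDelta duration = webrtc::TimeDelta::Zero();
      webrtc::DataRate rate = webrtc::DataRate::PlusInfinity();
    };

    // Entering HOLD: schedule when the hold expires, stretch the next hold
    // duration by hold_duration_factor (capped at max_duration), and pin the
    // rate the loss based estimate may not exceed until then.
    HoldInfo EnterHold(const HoldInfo& last,
                       webrtc::Timestamp now,
                       webrtc::DataRate bounded_estimate,
                       double hold_duration_factor,
                       webrtc::TimeDelta max_duration) {
      return {.timestamp = now + last.duration,
              .duration = std::min(max_duration,
                                   last.duration * hold_duration_factor),
              .rate = bounded_estimate};
    }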
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_gn/moz.build
index ca9f20ab87..709bcdb937 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc
index 347e2a86d1..9b7ad03148 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc
@@ -795,7 +795,7 @@ TEST_F(LossBasedBweV2Test,
// The estimate is capped by acked_bitrate * BwRampupUpperBoundFactor.
EXPECT_EQ(result.bandwidth_estimate, estimate_1 * 0.9 * 1.2);
- // But if acked bitrate decrease, BWE does not decrease when there is no
+ // But if acked bitrate decreases, BWE does not decrease when there is no
// loss.
loss_based_bandwidth_estimator.SetAcknowledgedBitrate(estimate_1 * 0.9);
loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
@@ -809,6 +809,53 @@ TEST_F(LossBasedBweV2Test,
result.bandwidth_estimate);
}
+// Ensure that the state can switch to kIncrease even when the bandwidth is
+// bounded by acked bitrate.
+TEST_F(LossBasedBweV2Test, EnsureIncreaseEvenIfAckedBitrateBound) {
+ ExplicitKeyValueConfig key_value_config(ShortObservationConfig(
+ "LossThresholdOfHighBandwidthPreference:0.99,"
+ "BwRampupUpperBoundFactor:1.2,"
+ // Set InstantUpperBoundBwBalance high to disable InstantUpperBound cap.
+ "InstantUpperBoundBwBalance:10000kbps,"));
+ std::vector<PacketResult> enough_feedback_1 =
+ CreatePacketResultsWith100pLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero());
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
+ DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000);
+
+ loss_based_bandwidth_estimator.SetBandwidthEstimate(
+ DataRate::KilobitsPerSec(600));
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(
+ DataRate::KilobitsPerSec(300));
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1,
+ delay_based_estimate,
+ /*in_alr=*/false);
+ ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
+ LossBasedState::kDecreasing);
+ LossBasedBweV2::Result result =
+ loss_based_bandwidth_estimator.GetLossBasedResult();
+ DataRate estimate_1 = result.bandwidth_estimate;
+ ASSERT_LT(estimate_1.kbps(), 600);
+
+ // Set a low acked bitrate.
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(estimate_1 / 2);
+
+ int feedback_count = 1;
+ while (feedback_count < 5 && result.state != LossBasedState::kIncreasing) {
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ feedback_count++ * kObservationDurationLowerBound),
+ delay_based_estimate,
+ /*in_alr=*/false);
+ result = loss_based_bandwidth_estimator.GetLossBasedResult();
+ }
+
+ ASSERT_EQ(result.state, LossBasedState::kIncreasing);
+ // The estimate increases by 1 bps.
+ EXPECT_EQ(result.bandwidth_estimate, estimate_1 + DataRate::BitsPerSec(1));
+}
+
// After loss based bwe backs off, the estimate is bounded during the delayed
// window.
TEST_F(LossBasedBweV2Test,
@@ -1007,164 +1054,6 @@ TEST_F(LossBasedBweV2Test,
DataRate::KilobitsPerSec(600));
}
-TEST_F(LossBasedBweV2Test,
- StricterBoundUsingHighLossRateThresholdAt10pLossRate) {
- ExplicitKeyValueConfig key_value_config(
- ShortObservationConfig("HighLossRateThreshold:0.09"));
- LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
- loss_based_bandwidth_estimator.SetMinMaxBitrate(
- /*min_bitrate=*/DataRate::KilobitsPerSec(10),
- /*max_bitrate=*/DataRate::KilobitsPerSec(1000000));
- DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000);
- loss_based_bandwidth_estimator.SetBandwidthEstimate(
- DataRate::KilobitsPerSec(600));
-
- std::vector<PacketResult> enough_feedback_10p_loss_1 =
- CreatePacketResultsWith10pPacketLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero());
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_10p_loss_1, delay_based_estimate,
-
- /*in_alr=*/false);
-
- std::vector<PacketResult> enough_feedback_10p_loss_2 =
- CreatePacketResultsWith10pPacketLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound);
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_10p_loss_2, delay_based_estimate,
-
- /*in_alr=*/false);
-
- // At 10% loss rate and high loss rate threshold to be 10%, cap the estimate
- // to be 500 * 1000-0.1 = 400kbps.
- EXPECT_EQ(
- loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- DataRate::KilobitsPerSec(400));
-}
-
-TEST_F(LossBasedBweV2Test,
- StricterBoundUsingHighLossRateThresholdAt50pLossRate) {
- ExplicitKeyValueConfig key_value_config(
- ShortObservationConfig("HighLossRateThreshold:0.3"));
- LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
- loss_based_bandwidth_estimator.SetMinMaxBitrate(
- /*min_bitrate=*/DataRate::KilobitsPerSec(10),
- /*max_bitrate=*/DataRate::KilobitsPerSec(1000000));
- DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000);
- loss_based_bandwidth_estimator.SetBandwidthEstimate(
- DataRate::KilobitsPerSec(600));
-
- std::vector<PacketResult> enough_feedback_50p_loss_1 =
- CreatePacketResultsWith50pPacketLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero());
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_50p_loss_1, delay_based_estimate,
-
- /*in_alr=*/false);
-
- std::vector<PacketResult> enough_feedback_50p_loss_2 =
- CreatePacketResultsWith50pPacketLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound);
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_50p_loss_2, delay_based_estimate,
-
- /*in_alr=*/false);
-
- // At 50% loss rate and high loss rate threshold to be 30%, cap the estimate
- // to be the min bitrate.
- EXPECT_EQ(
- loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- DataRate::KilobitsPerSec(10));
-}
-
-TEST_F(LossBasedBweV2Test,
- StricterBoundUsingHighLossRateThresholdAt100pLossRate) {
- ExplicitKeyValueConfig key_value_config(
- ShortObservationConfig("HighLossRateThreshold:0.3"));
- LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
- loss_based_bandwidth_estimator.SetMinMaxBitrate(
- /*min_bitrate=*/DataRate::KilobitsPerSec(10),
- /*max_bitrate=*/DataRate::KilobitsPerSec(1000000));
- DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000);
- loss_based_bandwidth_estimator.SetBandwidthEstimate(
- DataRate::KilobitsPerSec(600));
-
- std::vector<PacketResult> enough_feedback_100p_loss_1 =
- CreatePacketResultsWith100pLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero());
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_100p_loss_1, delay_based_estimate,
-
- /*in_alr=*/false);
-
- std::vector<PacketResult> enough_feedback_100p_loss_2 =
- CreatePacketResultsWith100pLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound);
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_100p_loss_2, delay_based_estimate,
-
- /*in_alr=*/false);
-
- // At 100% loss rate and high loss rate threshold to be 30%, cap the estimate
- // to be the min bitrate.
- EXPECT_EQ(
- loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- DataRate::KilobitsPerSec(10));
-}
-
-TEST_F(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) {
- ExplicitKeyValueConfig key_value_config(
- ShortObservationConfig("HighLossRateThreshold:0.3"));
- LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
- loss_based_bandwidth_estimator.SetMinMaxBitrate(
- /*min_bitrate=*/DataRate::KilobitsPerSec(10),
- /*max_bitrate=*/DataRate::KilobitsPerSec(1000000));
- DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000);
- loss_based_bandwidth_estimator.SetBandwidthEstimate(
- DataRate::KilobitsPerSec(600));
-
- std::vector<PacketResult> enough_feedback_100p_loss_1 =
- CreatePacketResultsWith100pLossRate(
- /*first_packet_timestamp=*/Timestamp::Zero());
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_100p_loss_1, delay_based_estimate,
-
- /*in_alr=*/false);
-
- // Make sure that the estimate is set to min bitrate because of 100% loss
- // rate.
- EXPECT_EQ(
- loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- DataRate::KilobitsPerSec(10));
-
- // Create some feedbacks with 0 loss rate to simulate network recovering.
- std::vector<PacketResult> enough_feedback_0p_loss_1 =
- CreatePacketResultsWithReceivedPackets(
- /*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound);
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_0p_loss_1, delay_based_estimate,
-
- /*in_alr=*/false);
-
- std::vector<PacketResult> enough_feedback_0p_loss_2 =
- CreatePacketResultsWithReceivedPackets(
- /*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound * 2);
- loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- enough_feedback_0p_loss_2, delay_based_estimate,
-
- /*in_alr=*/false);
-
- // The estimate increases as network recovers.
- EXPECT_GT(
- loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- DataRate::KilobitsPerSec(10));
-}
-
TEST_F(LossBasedBweV2Test, EstimateIsNotHigherThanMaxBitrate) {
ExplicitKeyValueConfig key_value_config(
Config(/*enabled=*/true, /*valid=*/true));
@@ -1494,6 +1383,92 @@ TEST_F(LossBasedBweV2Test, IncreaseUsingPaddingStateIfFieldTrial) {
LossBasedState::kIncreaseUsingPadding);
}
+TEST_F(LossBasedBweV2Test, BestCandidateResetsToUpperBoundInFieldTrial) {
+ ExplicitKeyValueConfig key_value_config(
+ ShortObservationConfig("PaddingDuration:1000ms,BoundBestCandidate:true"));
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
+ loss_based_bandwidth_estimator.SetBandwidthEstimate(
+ DataRate::KilobitsPerSec(2500));
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith50pPacketLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero()),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ LossBasedBweV2::Result result_after_loss =
+ loss_based_bandwidth_estimator.GetLossBasedResult();
+ ASSERT_EQ(result_after_loss.state, LossBasedState::kDecreasing);
+
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ 2 * kObservationDurationLowerBound),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ // After a BWE decrease due to large loss, BWE is expected to ramp up slowly
+ // and follow the acked bitrate.
+ EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
+ LossBasedState::kIncreaseUsingPadding);
+ EXPECT_NEAR(loss_based_bandwidth_estimator.GetLossBasedResult()
+ .bandwidth_estimate.kbps(),
+ result_after_loss.bandwidth_estimate.kbps(), 100);
+}
+
+TEST_F(LossBasedBweV2Test, DecreaseToAckedCandidateIfPaddingInAlr) {
+ ExplicitKeyValueConfig key_value_config(ShortObservationConfig(
+ "PaddingDuration:1000ms,"
+ // Set InstantUpperBoundBwBalance high to disable InstantUpperBound cap.
+ "InstantUpperBoundBwBalance:10000kbps"));
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
+ loss_based_bandwidth_estimator.SetBandwidthEstimate(
+ DataRate::KilobitsPerSec(1000));
+ int feedback_id = 0;
+ while (loss_based_bandwidth_estimator.GetLossBasedResult().state !=
+ LossBasedState::kDecreasing) {
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith100pLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * feedback_id),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ feedback_id++;
+ }
+
+ while (loss_based_bandwidth_estimator.GetLossBasedResult().state !=
+ LossBasedState::kIncreaseUsingPadding) {
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * feedback_id),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ feedback_id++;
+ }
+ ASSERT_GT(
+ loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
+ DataRate::KilobitsPerSec(900));
+
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(
+ DataRate::KilobitsPerSec(100));
+ // Padding is sent now; create some lost packets.
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith100pLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * feedback_id),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/true);
+ EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
+ LossBasedState::kDecreasing);
+ EXPECT_EQ(
+ loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
+ DataRate::KilobitsPerSec(100));
+}
+
TEST_F(LossBasedBweV2Test, DecreaseAfterPadding) {
ExplicitKeyValueConfig key_value_config(ShortObservationConfig(
"PaddingDuration:1000ms,BwRampupUpperBoundFactor:2.0"));
@@ -1580,7 +1555,7 @@ TEST_F(LossBasedBweV2Test, IncreaseEstimateIfNotHold) {
TEST_F(LossBasedBweV2Test, IncreaseEstimateAfterHoldDuration) {
ExplicitKeyValueConfig key_value_config(
- ShortObservationConfig("HoldDurationFactor:3"));
+ ShortObservationConfig("HoldDurationFactor:10"));
LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
loss_based_bandwidth_estimator.SetBandwidthEstimate(
DataRate::KilobitsPerSec(2500));
@@ -1629,36 +1604,126 @@ TEST_F(LossBasedBweV2Test, IncreaseEstimateAfterHoldDuration) {
/*in_alr=*/false);
EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
LossBasedState::kDecreasing);
- estimate =
+ DataRate estimate_at_hold =
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate;
- // During the hold duration, e.g. next 900ms, the estimate cannot increase.
+ // During the hold duration, e.g. the next 3s, the estimate cannot increase
+ // above the hold rate. Feed lost packets to push the estimate below it.
for (int i = 4; i <= 6; ++i) {
loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- CreatePacketResultsWithReceivedPackets(
+ CreatePacketResultsWith100pLossRate(
/*first_packet_timestamp=*/Timestamp::Zero() +
kObservationDurationLowerBound * i),
/*delay_based_estimate=*/DataRate::PlusInfinity(),
/*in_alr=*/false);
EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
LossBasedState::kDecreasing);
- EXPECT_EQ(
+ EXPECT_LT(
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- estimate);
+ estimate_at_hold);
+ }
+
+ int feedback_id = 7;
+ while (loss_based_bandwidth_estimator.GetLossBasedResult().state !=
+ LossBasedState::kIncreasing) {
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * feedback_id),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+ if (loss_based_bandwidth_estimator.GetLossBasedResult().state ==
+ LossBasedState::kDecreasing) {
+ // During the hold duration, the estimate cannot go higher than the estimate
+ // at hold.
+ EXPECT_LE(loss_based_bandwidth_estimator.GetLossBasedResult()
+ .bandwidth_estimate,
+ estimate_at_hold);
+ } else if (loss_based_bandwidth_estimator.GetLossBasedResult().state ==
+ LossBasedState::kIncreasing) {
+ // After the hold duration, the estimate can increase again.
+ EXPECT_GT(loss_based_bandwidth_estimator.GetLossBasedResult()
+ .bandwidth_estimate,
+ estimate_at_hold);
+ }
+ feedback_id++;
}
+}
- // After the hold duration, the estimate can increase again.
+TEST_F(LossBasedBweV2Test, HoldRateNotLowerThanAckedRate) {
+ ExplicitKeyValueConfig key_value_config(ShortObservationConfig(
+ "HoldDurationFactor:10,LowerBoundByAckedRateFactor:1.0"));
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
+ loss_based_bandwidth_estimator.SetBandwidthEstimate(
+ DataRate::KilobitsPerSec(2500));
loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
- CreatePacketResultsWithReceivedPackets(
+ CreatePacketResultsWith50pPacketLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero()),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+ ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
+ LossBasedState::kDecreasing);
+
+ // During the hold duration, the hold rate is not lower than the acked rate.
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(
+ DataRate::KilobitsPerSec(1000));
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith50pPacketLossRate(
/*first_packet_timestamp=*/Timestamp::Zero() +
- kObservationDurationLowerBound * 7),
+ kObservationDurationLowerBound),
/*delay_based_estimate=*/DataRate::PlusInfinity(),
/*in_alr=*/false);
EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state,
- LossBasedState::kIncreasing);
- EXPECT_GE(
+ LossBasedState::kDecreasing);
+ EXPECT_EQ(
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
- estimate);
+ DataRate::KilobitsPerSec(1000));
+}
+
+TEST_F(LossBasedBweV2Test, EstimateNotLowerThanAckedRate) {
+ ExplicitKeyValueConfig key_value_config(
+ ShortObservationConfig("LowerBoundByAckedRateFactor:1.0"));
+ LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config);
+ loss_based_bandwidth_estimator.SetBandwidthEstimate(
+ DataRate::KilobitsPerSec(2500));
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith100pLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero()),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+ ASSERT_LT(
+ loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
+ DataRate::KilobitsPerSec(1000));
+
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(
+ DataRate::KilobitsPerSec(1000));
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWith100pLossRate(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+ EXPECT_EQ(
+ loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
+ DataRate::KilobitsPerSec(1000));
+
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * 2),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate(
+ CreatePacketResultsWithReceivedPackets(
+ /*first_packet_timestamp=*/Timestamp::Zero() +
+ kObservationDurationLowerBound * 3),
+ /*delay_based_estimate=*/DataRate::PlusInfinity(),
+ /*in_alr=*/false);
+
+ // Verify that the estimate recovers from the acked rate.
+ EXPECT_GT(
+ loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate,
+ DataRate::KilobitsPerSec(1000));
}
TEST_F(LossBasedBweV2Test, EndHoldDurationIfDelayBasedEstimateWorks) {
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build
index 703c22a590..049ac6f477 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build
index 291502c95a..6e1d0acff5 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
index b09cb22f49..22693d67e9 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc
@@ -203,9 +203,9 @@ TimeDelta RttBasedBackoff::CorrectedRtt() const {
RttBasedBackoff::~RttBasedBackoff() = default;
SendSideBandwidthEstimation::SendSideBandwidthEstimation(
- const FieldTrialsView* key_value_config,
- RtcEventLog* event_log)
- : rtt_backoff_(key_value_config),
+ const FieldTrialsView* key_value_config, RtcEventLog* event_log)
+ : key_value_config_(key_value_config),
+ rtt_backoff_(key_value_config),
lost_packets_since_last_loss_update_(0),
expected_packets_since_last_loss_update_(0),
current_target_(DataRate::Zero()),
@@ -234,7 +234,7 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation(
high_loss_threshold_(kDefaultHighLossThreshold),
bitrate_threshold_(kDefaultBitrateThreshold),
loss_based_bandwidth_estimator_v1_(key_value_config),
- loss_based_bandwidth_estimator_v2_(key_value_config),
+ loss_based_bandwidth_estimator_v2_(new LossBasedBweV2(key_value_config)),
loss_based_state_(LossBasedState::kDelayBasedEstimate),
disable_receiver_limit_caps_only_("Disabled") {
RTC_DCHECK(event_log);
@@ -252,7 +252,7 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation(
ParseFieldTrial({&disable_receiver_limit_caps_only_},
key_value_config->Lookup("WebRTC-Bwe-ReceiverLimitCapsOnly"));
if (LossBasedBandwidthEstimatorV2Enabled()) {
- loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate(
+ loss_based_bandwidth_estimator_v2_->SetMinMaxBitrate(
min_bitrate_configured_, max_bitrate_configured_);
}
}
@@ -281,6 +281,10 @@ void SendSideBandwidthEstimation::OnRouteChange() {
uma_update_state_ = kNoUpdate;
uma_rtt_state_ = kNoUpdate;
last_rtc_event_log_ = Timestamp::MinusInfinity();
+ if (loss_based_bandwidth_estimator_v2_->UseInStartPhase()) {
+ loss_based_bandwidth_estimator_v2_.reset(
+ new LossBasedBweV2(key_value_config_));
+ }
}
void SendSideBandwidthEstimation::SetBitrates(
@@ -315,8 +319,8 @@ void SendSideBandwidthEstimation::SetMinMaxBitrate(DataRate min_bitrate,
} else {
max_bitrate_configured_ = kDefaultMaxBitrate;
}
- loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate(min_bitrate_configured_,
- max_bitrate_configured_);
+ loss_based_bandwidth_estimator_v2_->SetMinMaxBitrate(min_bitrate_configured_,
+ max_bitrate_configured_);
}
int SendSideBandwidthEstimation::GetMinBitrate() const {
@@ -371,7 +375,7 @@ void SendSideBandwidthEstimation::SetAcknowledgedRate(
*acknowledged_rate, at_time);
}
if (LossBasedBandwidthEstimatorV2Enabled()) {
- loss_based_bandwidth_estimator_v2_.SetAcknowledgedBitrate(
+ loss_based_bandwidth_estimator_v2_->SetAcknowledgedBitrate(
*acknowledged_rate);
}
}
@@ -386,7 +390,7 @@ void SendSideBandwidthEstimation::UpdateLossBasedEstimator(
report.packet_feedbacks, report.feedback_time);
}
if (LossBasedBandwidthEstimatorV2Enabled()) {
- loss_based_bandwidth_estimator_v2_.UpdateBandwidthEstimate(
+ loss_based_bandwidth_estimator_v2_->UpdateBandwidthEstimate(
report.packet_feedbacks, delay_based_limit_, in_alr);
UpdateEstimate(report.feedback_time);
}
@@ -492,7 +496,7 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) {
// We trust the REMB and/or delay-based estimate during the first 2 seconds if
// we haven't had any packet loss reported, to allow startup bitrate probing.
if (last_fraction_loss_ == 0 && IsInStartPhase(at_time) &&
- !loss_based_bandwidth_estimator_v2_.ReadyToUseInStartPhase()) {
+ !loss_based_bandwidth_estimator_v2_->ReadyToUseInStartPhase()) {
DataRate new_bitrate = current_target_;
// TODO(srte): We should not allow the new_bitrate to be larger than the
// receiver limit here.
@@ -534,7 +538,7 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) {
if (LossBasedBandwidthEstimatorV2ReadyForUse()) {
LossBasedBweV2::Result result =
- loss_based_bandwidth_estimator_v2_.GetLossBasedResult();
+ loss_based_bandwidth_estimator_v2_->GetLossBasedResult();
loss_based_state_ = result.state;
UpdateTargetBitrate(result.bandwidth_estimate, at_time);
return;
@@ -690,13 +694,13 @@ bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV1ReadyForUse()
}
bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV2Enabled() const {
- return loss_based_bandwidth_estimator_v2_.IsEnabled();
+ return loss_based_bandwidth_estimator_v2_->IsEnabled();
}
bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV2ReadyForUse()
const {
return LossBasedBandwidthEstimatorV2Enabled() &&
- loss_based_bandwidth_estimator_v2_.IsReady();
+ loss_based_bandwidth_estimator_v2_->IsReady();
}
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
index 3a4efc47c7..dd4d25a236 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h
@@ -16,6 +16,7 @@
#include <stdint.h>
#include <deque>
+#include <memory>
#include <utility>
#include <vector>
@@ -167,6 +168,7 @@ class SendSideBandwidthEstimation {
bool LossBasedBandwidthEstimatorV1ReadyForUse() const;
bool LossBasedBandwidthEstimatorV2ReadyForUse() const;
+ const FieldTrialsView* key_value_config_;
RttBasedBackoff rtt_backoff_;
LinkCapacityTracker link_capacity_;
@@ -208,7 +210,7 @@ class SendSideBandwidthEstimation {
float high_loss_threshold_;
DataRate bitrate_threshold_;
LossBasedBandwidthEstimation loss_based_bandwidth_estimator_v1_;
- LossBasedBweV2 loss_based_bandwidth_estimator_v2_;
+ std::unique_ptr<LossBasedBweV2> loss_based_bandwidth_estimator_v2_;
LossBasedState loss_based_state_;
FieldTrialFlag disable_receiver_limit_caps_only_;
};
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bwe_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bwe_gn/moz.build
index d83d51f985..08cfdec69b 100644
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bwe_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/send_side_bwe_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/rtp/control_handler_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/rtp/control_handler_gn/moz.build
index 7e8cb87820..62800e263d 100644
--- a/third_party/libwebrtc/modules/congestion_controller/rtp/control_handler_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/rtp/control_handler_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/congestion_controller/rtp/transport_feedback_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/rtp/transport_feedback_gn/moz.build
index 40ead5619c..41f64326b2 100644
--- a/third_party/libwebrtc/modules/congestion_controller/rtp/transport_feedback_gn/moz.build
+++ b/third_party/libwebrtc/modules/congestion_controller/rtp/transport_feedback_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc
index 40764de7ae..81caa9bd2d 100644
--- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc
+++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc
@@ -112,6 +112,7 @@ void BaseCapturerPipeWire::OnScreenCastSessionClosed() {
if (!capturer_failed_) {
options_.screencast_stream()->StopScreenCastStream();
}
+ capturer_failed_ = true;
}
void BaseCapturerPipeWire::UpdateResolution(uint32_t width, uint32_t height) {
diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc
index 61c6957d27..473f913466 100644
--- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc
+++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc
@@ -14,7 +14,6 @@
#include <libdrm/drm_fourcc.h>
#include <pipewire/pipewire.h>
#include <spa/param/video/format-utils.h>
-#include <sys/mman.h>
#include <vector>
@@ -49,33 +48,6 @@ constexpr int CursorMetaSize(int w, int h) {
constexpr PipeWireVersion kDmaBufModifierMinVersion = {0, 3, 33};
constexpr PipeWireVersion kDropSingleModifierMinVersion = {0, 3, 40};
-class ScopedBuf {
- public:
- ScopedBuf() {}
- ScopedBuf(uint8_t* map, int map_size, int fd)
- : map_(map), map_size_(map_size), fd_(fd) {}
- ~ScopedBuf() {
- if (map_ != MAP_FAILED) {
- munmap(map_, map_size_);
- }
- }
-
- explicit operator bool() { return map_ != MAP_FAILED; }
-
- void initialize(uint8_t* map, int map_size, int fd) {
- map_ = map;
- map_size_ = map_size;
- fd_ = fd;
- }
-
- uint8_t* get() { return map_; }
-
- protected:
- uint8_t* map_ = static_cast<uint8_t*>(MAP_FAILED);
- int map_size_;
- int fd_;
-};
-
class SharedScreenCastStreamPrivate {
public:
SharedScreenCastStreamPrivate();
diff --git a/third_party/libwebrtc/modules/desktop_capture/mac/desktop_frame_provider.h b/third_party/libwebrtc/modules/desktop_capture/mac/desktop_frame_provider.h
index aad28d2f30..64ef5750ec 100644
--- a/third_party/libwebrtc/modules/desktop_capture/mac/desktop_frame_provider.h
+++ b/third_party/libwebrtc/modules/desktop_capture/mac/desktop_frame_provider.h
@@ -46,6 +46,8 @@ class DesktopFrameProvider {
// Expected to be called before stopping the CGDisplayStreamRef streams.
void Release();
+ bool allow_iosurface() const { return allow_iosurface_; }
+
private:
SequenceChecker thread_checker_;
const bool allow_iosurface_;
diff --git a/third_party/libwebrtc/modules/desktop_capture/mac/screen_capturer_mac.mm b/third_party/libwebrtc/modules/desktop_capture/mac/screen_capturer_mac.mm
index 1f4a62f7cd..785a15dfa4 100644
--- a/third_party/libwebrtc/modules/desktop_capture/mac/screen_capturer_mac.mm
+++ b/third_party/libwebrtc/modules/desktop_capture/mac/screen_capturer_mac.mm
@@ -442,6 +442,10 @@ void ScreenCapturerMac::ScreenConfigurationChanged() {
bool ScreenCapturerMac::RegisterRefreshAndMoveHandlers() {
RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!desktop_frame_provider_.allow_iosurface()) {
+ return true;
+ }
+
desktop_config_ = desktop_config_monitor_->desktop_configuration();
for (const auto& config : desktop_config_.displays) {
size_t pixel_width = config.pixel_bounds.width();
diff --git a/third_party/libwebrtc/modules/desktop_capture/win/dxgi_duplicator_controller.h b/third_party/libwebrtc/modules/desktop_capture/win/dxgi_duplicator_controller.h
index 2b1e0ab041..815986f680 100644
--- a/third_party/libwebrtc/modules/desktop_capture/win/dxgi_duplicator_controller.h
+++ b/third_party/libwebrtc/modules/desktop_capture/win/dxgi_duplicator_controller.h
@@ -132,7 +132,7 @@ class RTC_EXPORT DxgiDuplicatorController {
// scoped_refptr<DxgiDuplicatorController> accesses private AddRef() and
// Release() functions.
- friend class rtc::scoped_refptr<DxgiDuplicatorController>;
+ friend class webrtc::scoped_refptr<DxgiDuplicatorController>;
// A private constructor to ensure consumers to use
// DxgiDuplicatorController::Instance().
diff --git a/third_party/libwebrtc/modules/module_api_gn/moz.build b/third_party/libwebrtc/modules/module_api_gn/moz.build
index d61cca4a48..7613736af2 100644
--- a/third_party/libwebrtc/modules/module_api_gn/moz.build
+++ b/third_party/libwebrtc/modules/module_api_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/module_api_public_gn/moz.build b/third_party/libwebrtc/modules/module_api_public_gn/moz.build
index 45518d1a8a..c40e3cf5e9 100644
--- a/third_party/libwebrtc/modules/module_api_public_gn/moz.build
+++ b/third_party/libwebrtc/modules/module_api_public_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/module_fec_api_gn/moz.build b/third_party/libwebrtc/modules/module_fec_api_gn/moz.build
index 7b4274f1b8..86a280e5cc 100644
--- a/third_party/libwebrtc/modules/module_fec_api_gn/moz.build
+++ b/third_party/libwebrtc/modules/module_fec_api_gn/moz.build
@@ -176,16 +176,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/pacing/interval_budget_gn/moz.build b/third_party/libwebrtc/modules/pacing/interval_budget_gn/moz.build
index a528123ae0..8bb44ecf62 100644
--- a/third_party/libwebrtc/modules/pacing/interval_budget_gn/moz.build
+++ b/third_party/libwebrtc/modules/pacing/interval_budget_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller.cc b/third_party/libwebrtc/modules/pacing/pacing_controller.cc
index 13ff9a2a95..5b81207d56 100644
--- a/third_party/libwebrtc/modules/pacing/pacing_controller.cc
+++ b/third_party/libwebrtc/modules/pacing/pacing_controller.cc
@@ -73,7 +73,7 @@ PacingController::PacingController(Clock* clock,
keyframe_flushing_(
IsEnabled(field_trials_, "WebRTC-Pacer-KeyframeFlushing")),
transport_overhead_per_packet_(DataSize::Zero()),
- send_burst_interval_(TimeDelta::Zero()),
+ send_burst_interval_(kDefaultBurstInterval),
last_timestamp_(clock_->CurrentTime()),
paused_(false),
media_debt_(DataSize::Zero()),
diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller.h b/third_party/libwebrtc/modules/pacing/pacing_controller.h
index dd5636ccef..04e0a820f9 100644
--- a/third_party/libwebrtc/modules/pacing/pacing_controller.h
+++ b/third_party/libwebrtc/modules/pacing/pacing_controller.h
@@ -25,6 +25,7 @@
#include "api/transport/field_trial_based_config.h"
#include "api/transport/network_types.h"
#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
#include "modules/pacing/bitrate_prober.h"
#include "modules/pacing/interval_budget.h"
#include "modules/pacing/prioritized_packet_queue.h"
@@ -92,6 +93,10 @@ class PacingController {
// the send burst interval.
// Ex: max send burst interval = 63Kb / 10Mbit/s = 50ms.
static constexpr DataSize kMaxBurstSize = DataSize::Bytes(63 * 1000);
+ // The pacer is allowed to send enqueued packets in bursts and can build up a
+ // packet "debt" that corresponds to approximately the send rate during
+ // the burst interval.
+ static constexpr TimeDelta kDefaultBurstInterval = TimeDelta::Millis(40);
PacingController(Clock* clock,
PacketSender* packet_sender,
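A minimal sketch of what the new kDefaultBurstInterval allows in practice: the pacer may send roughly rate * interval bytes back to back before throttling. The 5 Mbps pacing rate and the ExampleBurstBudget helper below are assumptions for illustration only, not part of the patch.

#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "modules/pacing/pacing_controller.h"

namespace webrtc {
// Hypothetical example: the back-to-back "debt" permitted by the default
// 40 ms burst interval at an assumed pacing rate of 5 Mbps.
inline DataSize ExampleBurstBudget() {
  // 5'000'000 bps * 40 ms = 200'000 bits = 25'000 bytes, well under
  // kMaxBurstSize (63'000 bytes).
  return DataRate::BitsPerSec(5'000'000) *
         PacingController::kDefaultBurstInterval;
}
}  // namespace webrtc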
diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc b/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc
index ba93d05bb7..9e6ede6dc0 100644
--- a/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc
+++ b/third_party/libwebrtc/modules/pacing/pacing_controller_unittest.cc
@@ -427,6 +427,7 @@ TEST_F(PacingControllerTest, BudgetAffectsAudioInTrial) {
DataRate pacing_rate =
DataRate::BitsPerSec(kPacketSize / 3 * 8 * kProcessIntervalsPerSecond);
pacer.SetPacingRates(pacing_rate, DataRate::Zero());
+ pacer.SetSendBurstInterval(TimeDelta::Zero());
// Video fills budget for following process periods.
pacer.EnqueuePacket(video_.BuildNextPacket(kPacketSize));
EXPECT_CALL(callback_, SendPacket).Times(1);
@@ -484,7 +485,7 @@ TEST_F(PacingControllerTest, FirstSentPacketTimeIsSet) {
EXPECT_EQ(kStartTime, pacer->FirstSentPacketTime());
}
-TEST_F(PacingControllerTest, QueueAndPacePackets) {
+TEST_F(PacingControllerTest, QueueAndPacePacketsWithZeroBurstPeriod) {
const uint32_t kSsrc = 12345;
uint16_t sequence_number = 1234;
const DataSize kPackeSize = DataSize::Bytes(250);
@@ -495,6 +496,7 @@ TEST_F(PacingControllerTest, QueueAndPacePackets) {
const size_t kPacketsToSend = (kSendInterval * kTargetRate).bytes() *
kPaceMultiplier / kPackeSize.bytes();
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
+ pacer->SetSendBurstInterval(TimeDelta::Zero());
pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero());
for (size_t i = 0; i < kPacketsToSend; ++i) {
@@ -536,30 +538,30 @@ TEST_F(PacingControllerTest, PaceQueuedPackets) {
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero());
- // Due to the multiplicative factor we can send 5 packets during a send
- // interval. (network capacity * multiplier / (8 bits per byte *
- // (packet size * #send intervals per second)
- const size_t packets_to_send_per_interval =
- kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200);
- for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ const size_t packets_to_send_per_burst_interval =
+ (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval)
+ .bytes() /
+ kPacketSize;
+ for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) {
SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
kPacketSize);
}
- for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) {
+ for (size_t j = 0; j < packets_to_send_per_burst_interval * 10; ++j) {
pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, ssrc,
sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize));
}
- EXPECT_EQ(packets_to_send_per_interval + packets_to_send_per_interval * 10,
+ EXPECT_EQ(packets_to_send_per_burst_interval +
+ packets_to_send_per_burst_interval * 10,
pacer->QueueSizePackets());
- while (pacer->QueueSizePackets() > packets_to_send_per_interval * 10) {
+ while (pacer->QueueSizePackets() > packets_to_send_per_burst_interval * 10) {
AdvanceTimeUntil(pacer->NextSendTime());
pacer->ProcessPackets();
}
- EXPECT_EQ(pacer->QueueSizePackets(), packets_to_send_per_interval * 10);
+ EXPECT_EQ(pacer->QueueSizePackets(), packets_to_send_per_burst_interval * 10);
EXPECT_CALL(callback_, SendPadding).Times(0);
EXPECT_CALL(callback_, SendPacket(ssrc, _, _, false, false))
@@ -582,12 +584,12 @@ TEST_F(PacingControllerTest, PaceQueuedPackets) {
pacer->ProcessPackets();
// Send some more packet, just show that we can..?
- for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) {
SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc,
sequence_number++, clock_.TimeInMilliseconds(), 250);
}
- EXPECT_EQ(packets_to_send_per_interval, pacer->QueueSizePackets());
- for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ EXPECT_EQ(packets_to_send_per_burst_interval, pacer->QueueSizePackets());
+ for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) {
AdvanceTimeUntil(pacer->NextSendTime());
pacer->ProcessPackets();
}
@@ -641,19 +643,23 @@ TEST_F(PacingControllerTest,
TEST_F(PacingControllerTest, Padding) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- const size_t kPacketSize = 250;
+ const size_t kPacketSize = 1000;
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
pacer->SetPacingRates(kTargetRate * kPaceMultiplier, kTargetRate);
- const size_t kPacketsToSend = 20;
+ const size_t kPacketsToSend = 30;
for (size_t i = 0; i < kPacketsToSend; ++i) {
SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
kPacketSize);
}
+
+ int expected_bursts =
+ floor(DataSize::Bytes(pacer->QueueSizePackets() * kPacketSize) /
+ (kPaceMultiplier * kTargetRate) /
+ PacingController::kDefaultBurstInterval);
const TimeDelta expected_pace_time =
- DataSize::Bytes(pacer->QueueSizePackets() * kPacketSize) /
- (kPaceMultiplier * kTargetRate);
+ (expected_bursts - 1) * PacingController::kDefaultBurstInterval;
EXPECT_CALL(callback_, SendPadding).Times(0);
// Only the media packets should be sent.
Timestamp start_time = clock_.CurrentTime();
@@ -663,7 +669,7 @@ TEST_F(PacingControllerTest, Padding) {
}
const TimeDelta actual_pace_time = clock_.CurrentTime() - start_time;
EXPECT_LE((actual_pace_time - expected_pace_time).Abs(),
- PacingController::kMinSleepTime);
+ PacingController::kDefaultBurstInterval);
// Pacing media happens at 2.5x, but padding was configured with 1.0x
// factor. We have to wait until the padding debt is gone before we start
@@ -766,8 +772,8 @@ TEST_F(PacingControllerTest, VerifyAverageBitrateVaryingMediaPayload) {
media_payload));
media_bytes += media_payload;
}
-
- AdvanceTimeUntil(pacer->NextSendTime());
+ AdvanceTimeUntil(std::min(clock_.CurrentTime() + TimeDelta::Millis(20),
+ pacer->NextSendTime()));
pacer->ProcessPackets();
}
@@ -805,20 +811,18 @@ TEST_F(PacingControllerTest, Priority) {
// Expect all high and normal priority to be sent out first.
EXPECT_CALL(callback_, SendPadding).Times(0);
+ testing::Sequence s;
EXPECT_CALL(callback_, SendPacket(ssrc, _, capture_time_ms, _, _))
- .Times(packets_to_send_per_interval + 1);
+ .Times(packets_to_send_per_interval + 1)
+ .InSequence(s);
+ EXPECT_CALL(callback_, SendPacket(ssrc_low_priority, _,
+ capture_time_ms_low_priority, _, _))
+ .InSequence(s);
- while (pacer->QueueSizePackets() > 1) {
+ while (pacer->QueueSizePackets() > 0) {
AdvanceTimeUntil(pacer->NextSendTime());
pacer->ProcessPackets();
}
-
- EXPECT_EQ(1u, pacer->QueueSizePackets());
-
- EXPECT_CALL(callback_, SendPacket(ssrc_low_priority, _,
- capture_time_ms_low_priority, _, _));
- AdvanceTimeUntil(pacer->NextSendTime());
- pacer->ProcessPackets();
}
TEST_F(PacingControllerTest, RetransmissionPriority) {
@@ -829,23 +833,22 @@ TEST_F(PacingControllerTest, RetransmissionPriority) {
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero());
- // Due to the multiplicative factor we can send 5 packets during a send
- // interval. (network capacity * multiplier / (8 bits per byte *
- // (packet size * #send intervals per second)
- const size_t packets_to_send_per_interval =
- kTargetRate.bps() * kPaceMultiplier / (8 * 250 * 200);
+ const size_t packets_to_send_per_burst_interval =
+ (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval)
+ .bytes() /
+ 250;
pacer->ProcessPackets();
EXPECT_EQ(0u, pacer->QueueSizePackets());
// Alternate retransmissions and normal packets.
- for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) {
pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, ssrc,
sequence_number++, capture_time_ms, 250));
pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kRetransmission, ssrc,
sequence_number++,
capture_time_ms_retransmission, 250));
}
- EXPECT_EQ(2 * packets_to_send_per_interval, pacer->QueueSizePackets());
+ EXPECT_EQ(2 * packets_to_send_per_burst_interval, pacer->QueueSizePackets());
// Expect all retransmissions to be sent out first despite having a later
// capture time.
@@ -853,19 +856,19 @@ TEST_F(PacingControllerTest, RetransmissionPriority) {
EXPECT_CALL(callback_, SendPacket(_, _, _, false, _)).Times(0);
EXPECT_CALL(callback_,
SendPacket(ssrc, _, capture_time_ms_retransmission, true, _))
- .Times(packets_to_send_per_interval);
+ .Times(packets_to_send_per_burst_interval);
- while (pacer->QueueSizePackets() > packets_to_send_per_interval) {
+ while (pacer->QueueSizePackets() > packets_to_send_per_burst_interval) {
AdvanceTimeUntil(pacer->NextSendTime());
pacer->ProcessPackets();
}
- EXPECT_EQ(packets_to_send_per_interval, pacer->QueueSizePackets());
+ EXPECT_EQ(packets_to_send_per_burst_interval, pacer->QueueSizePackets());
// Expect the remaining (non-retransmission) packets to be sent.
EXPECT_CALL(callback_, SendPadding).Times(0);
EXPECT_CALL(callback_, SendPacket(_, _, _, true, _)).Times(0);
EXPECT_CALL(callback_, SendPacket(ssrc, _, capture_time_ms, false, _))
- .Times(packets_to_send_per_interval);
+ .Times(packets_to_send_per_burst_interval);
while (pacer->QueueSizePackets() > 0) {
AdvanceTimeUntil(pacer->NextSendTime());
@@ -890,13 +893,13 @@ TEST_F(PacingControllerTest, HighPrioDoesntAffectBudget) {
sequence_number++, capture_time_ms, kPacketSize);
}
pacer->ProcessPackets();
+ EXPECT_EQ(pacer->QueueSizePackets(), 0u);
// Low prio packets does affect the budget.
- // Due to the multiplicative factor we can send 5 packets during a send
- // interval. (network capacity * multiplier / (8 bits per byte *
- // (packet size * #send intervals per second)
- const size_t kPacketsToSendPerInterval =
- kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200);
- for (size_t i = 0; i < kPacketsToSendPerInterval; ++i) {
+ const size_t kPacketsToSendPerBurstInterval =
+ (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval)
+ .bytes() /
+ kPacketSize;
+ for (size_t i = 0; i < kPacketsToSendPerBurstInterval; ++i) {
SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
kPacketSize);
@@ -904,16 +907,16 @@ TEST_F(PacingControllerTest, HighPrioDoesntAffectBudget) {
// Send all packets and measure pace time.
Timestamp start_time = clock_.CurrentTime();
+ EXPECT_EQ(pacer->NextSendTime(), clock_.CurrentTime());
while (pacer->QueueSizePackets() > 0) {
AdvanceTimeUntil(pacer->NextSendTime());
pacer->ProcessPackets();
}
- // Measure pacing time. Expect only low-prio packets to affect this.
+ // Measure pacing time.
TimeDelta pacing_time = clock_.CurrentTime() - start_time;
- TimeDelta expected_pacing_time =
- DataSize::Bytes(kPacketsToSendPerInterval * kPacketSize) /
- (kTargetRate * kPaceMultiplier);
+ // All packets sent in one burst since audio packets are not accounted for.
+ TimeDelta expected_pacing_time = TimeDelta::Zero();
EXPECT_NEAR(pacing_time.us<double>(), expected_pacing_time.us<double>(),
PacingController::kMinSleepTime.us<double>());
}
@@ -965,6 +968,7 @@ TEST_F(PacingControllerTest, DoesNotAllowOveruseAfterCongestion) {
auto now_ms = [this] { return clock_.TimeInMilliseconds(); };
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero());
+ pacer->SetSendBurstInterval(TimeDelta::Zero());
EXPECT_CALL(callback_, SendPadding).Times(0);
// The pacing rate is low enough that the budget should not allow two packets
// to be sent in a row.
@@ -1853,6 +1857,7 @@ TEST_F(PacingControllerTest, AccountsForAudioEnqueueTime) {
// Audio not paced, but still accounted for in budget.
pacer->SetAccountForAudioPackets(true);
pacer->SetPacingRates(kPacingDataRate, kPaddingDataRate);
+ pacer->SetSendBurstInterval(TimeDelta::Zero());
// Enqueue two audio packets, advance clock to where one packet
// should have drained the buffer already, has they been sent
@@ -1898,13 +1903,12 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) {
EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(),
PacingController::kPausedProcessInterval);
- // Enqueue a new packet, that can't be sent until previous buffer has
- // drained.
+ // Enqueue a new packet that can be sent immediately since the default burst
+ // interval is 40ms.
SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc,
sequnce_number++, clock_.TimeInMilliseconds(),
kPacketSize.bytes());
- EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime);
- clock_.AdvanceTime(kPacketPacingTime);
+ EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), TimeDelta::Zero());
pacer->ProcessPackets();
::testing::Mock::VerifyAndClearExpectations(&callback_);
@@ -1916,11 +1920,13 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) {
// previous debt has cleared. Since padding was disabled before, there
// currently is no padding debt.
pacer->SetPacingRates(kPacingDataRate, kPacingDataRate / 2);
- EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime);
+ EXPECT_EQ(pacer->QueueSizePackets(), 0u);
+ EXPECT_LT(pacer->NextSendTime() - clock_.CurrentTime(),
+ PacingController::kDefaultBurstInterval);
// Advance time, expect padding.
EXPECT_CALL(callback_, SendPadding).WillOnce(Return(kPacketSize.bytes()));
- clock_.AdvanceTime(kPacketPacingTime);
+ clock_.AdvanceTime(pacer->NextSendTime() - clock_.CurrentTime());
pacer->ProcessPackets();
::testing::Mock::VerifyAndClearExpectations(&callback_);
@@ -1933,7 +1939,7 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) {
pacer->EnqueuePacket(
BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++,
clock_.TimeInMilliseconds(), kPacketSize.bytes()));
- EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime);
+ EXPECT_EQ(pacer->NextSendTime(), clock_.CurrentTime());
}
TEST_F(PacingControllerTest, PaddingTargetAccountsForPaddingRate) {
@@ -2011,8 +2017,8 @@ TEST_F(PacingControllerTest, SendsFecPackets) {
TEST_F(PacingControllerTest, GapInPacingDoesntAccumulateBudget) {
const uint32_t kSsrc = 12345;
uint16_t sequence_number = 1234;
- const DataSize kPackeSize = DataSize::Bytes(250);
- const TimeDelta kPacketSendTime = TimeDelta::Millis(15);
+ const DataSize kPackeSize = DataSize::Bytes(1000);
+ const TimeDelta kPacketSendTime = TimeDelta::Millis(25);
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
pacer->SetPacingRates(kPackeSize / kPacketSendTime,
@@ -2028,15 +2034,20 @@ TEST_F(PacingControllerTest, GapInPacingDoesntAccumulateBudget) {
// Advance time kPacketSendTime past where the media debt should be 0.
clock_.AdvanceTime(2 * kPacketSendTime);
- // Enqueue two new packets. Expect only one to be sent one ProcessPackets().
+ // Enqueue three new packets. Expect only two to be sent in one
+ // ProcessPackets() call since the default burst interval is 40ms.
+ SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc,
+ sequence_number++, clock_.TimeInMilliseconds(),
+ kPackeSize.bytes());
+ SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc,
+ sequence_number++, clock_.TimeInMilliseconds(),
+ kPackeSize.bytes());
+ EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number + 1, _, _, _))
+ .Times(0);
pacer->EnqueuePacket(
BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number + 1,
clock_.TimeInMilliseconds(), kPackeSize.bytes()));
- pacer->EnqueuePacket(
- BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number + 2,
- clock_.TimeInMilliseconds(), kPackeSize.bytes()));
- EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number + 1,
- clock_.TimeInMilliseconds(), false, false));
+
pacer->ProcessPackets();
}
@@ -2044,6 +2055,7 @@ TEST_F(PacingControllerTest, HandlesSubMicrosecondSendIntervals) {
static constexpr DataSize kPacketSize = DataSize::Bytes(1);
static constexpr TimeDelta kPacketSendTime = TimeDelta::Micros(1);
auto pacer = std::make_unique<PacingController>(&clock_, &callback_, trials_);
+ pacer->SetSendBurstInterval(TimeDelta::Zero());
// Set pacing rate such that a packet is sent in 0.5us.
pacer->SetPacingRates(/*pacing_rate=*/2 * kPacketSize / kPacketSendTime,
diff --git a/third_party/libwebrtc/modules/pacing/pacing_gn/moz.build b/third_party/libwebrtc/modules/pacing/pacing_gn/moz.build
index 6b7f69865f..353f876c55 100644
--- a/third_party/libwebrtc/modules/pacing/pacing_gn/moz.build
+++ b/third_party/libwebrtc/modules/pacing/pacing_gn/moz.build
@@ -207,7 +207,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -217,10 +216,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc
index afa36ea88d..f7218e48a1 100644
--- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc
+++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.cc
@@ -17,35 +17,19 @@
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/transport/network_types.h"
#include "rtc_base/checks.h"
-#include "rtc_base/experiments/field_trial_parser.h"
-#include "rtc_base/experiments/field_trial_units.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
-namespace {
-
-constexpr const char* kBurstyPacerFieldTrial = "WebRTC-BurstyPacer";
-
-} // namespace
-
const int TaskQueuePacedSender::kNoPacketHoldback = -1;
-TaskQueuePacedSender::BurstyPacerFlags::BurstyPacerFlags(
- const FieldTrialsView& field_trials)
- : burst("burst") {
- ParseFieldTrial({&burst}, field_trials.Lookup(kBurstyPacerFieldTrial));
-}
-
TaskQueuePacedSender::TaskQueuePacedSender(
Clock* clock,
PacingController::PacketSender* packet_sender,
const FieldTrialsView& field_trials,
TimeDelta max_hold_back_window,
- int max_hold_back_window_in_packets,
- absl::optional<TimeDelta> burst_interval)
+ int max_hold_back_window_in_packets)
: clock_(clock),
- bursty_pacer_flags_(field_trials),
max_hold_back_window_(max_hold_back_window),
max_hold_back_window_in_packets_(max_hold_back_window_in_packets),
pacing_controller_(clock, packet_sender, field_trials),
@@ -56,17 +40,6 @@ TaskQueuePacedSender::TaskQueuePacedSender(
include_overhead_(false),
task_queue_(TaskQueueBase::Current()) {
RTC_DCHECK_GE(max_hold_back_window_, PacingController::kMinSleepTime);
- // There are multiple field trials that can affect burst. If multiple bursts
- // are specified we pick the largest of the values.
- absl::optional<TimeDelta> burst = bursty_pacer_flags_.burst.GetOptional();
- // If not overriden by an experiment, the burst is specified by the
- // `burst_interval` argument.
- if (!burst.has_value()) {
- burst = burst_interval;
- }
- if (burst.has_value()) {
- pacing_controller_.SetSendBurstInterval(burst.value());
- }
}
TaskQueuePacedSender::~TaskQueuePacedSender() {
@@ -74,6 +47,11 @@ TaskQueuePacedSender::~TaskQueuePacedSender() {
is_shutdown_ = true;
}
+void TaskQueuePacedSender::SetSendBurstInterval(TimeDelta burst_interval) {
+ RTC_DCHECK_RUN_ON(task_queue_);
+ pacing_controller_.SetSendBurstInterval(burst_interval);
+}
+
void TaskQueuePacedSender::EnsureStarted() {
RTC_DCHECK_RUN_ON(task_queue_);
is_started_ = true;
diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h
index fd71be1654..e29acdf878 100644
--- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h
+++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender.h
@@ -45,23 +45,21 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender {
// processed. Increasing this reduces thread wakeups at the expense of higher
// latency.
//
- // If the `burst_interval` parameter is set, the pacer is allowed to build up
- // a packet "debt" that correspond to approximately the send rate during the
- // specified interval. This greatly reduced wake ups by not pacing packets
- // within the allowed burst budget.
- //
// The taskqueue used when constructing a TaskQueuePacedSender will also be
// used for pacing.
- TaskQueuePacedSender(
- Clock* clock,
- PacingController::PacketSender* packet_sender,
- const FieldTrialsView& field_trials,
- TimeDelta max_hold_back_window,
- int max_hold_back_window_in_packets,
- absl::optional<TimeDelta> burst_interval = absl::nullopt);
+ TaskQueuePacedSender(Clock* clock,
+ PacingController::PacketSender* packet_sender,
+ const FieldTrialsView& field_trials,
+ TimeDelta max_hold_back_window,
+ int max_hold_back_window_in_packets);
~TaskQueuePacedSender() override;
+ // The pacer is allowed to send enqueued packets in bursts and can build up
+ // a packet "debt" that corresponds to approximately the send rate during
+ // `burst_interval`.
+ void SetSendBurstInterval(TimeDelta burst_interval);
+
// Ensure that necessary delayed tasks are scheduled.
void EnsureStarted();
@@ -145,15 +143,6 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender {
Stats GetStats() const;
Clock* const clock_;
- struct BurstyPacerFlags {
- // Parses `kBurstyPacerFieldTrial`. Example:
- // --force-fieldtrials=WebRTC-BurstyPacer/burst:20ms/
- explicit BurstyPacerFlags(const FieldTrialsView& field_trials);
- // If set, the pacer is allowed to build up a packet "debt" that correspond
- // to approximately the send rate during the specified interval.
- FieldTrialOptional<TimeDelta> burst;
- };
- const BurstyPacerFlags bursty_pacer_flags_;
// The holdback window prevents too frequent delayed MaybeProcessPackets()
// calls. These are only applicable if `allow_low_precision` is false.
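
For context, a minimal sketch of how the reworked API is meant to be used; `clock`, `packet_sender` and `field_trials` are placeholders for objects the call site already owns, and the setter must run on the task queue the pacer was constructed on (see the RTC_DCHECK_RUN_ON in the .cc change above). The 20 ms value only mirrors the example from the removed field-trial comment.

    TaskQueuePacedSender pacer(clock, &packet_sender, field_trials,
                               PacingController::kMinSleepTime,
                               TaskQueuePacedSender::kNoPacketHoldback);
    // Bursting is now opted into after construction instead of through the
    // removed constructor argument or the WebRTC-BurstyPacer field trial.
    pacer.SetSendBurstInterval(TimeDelta::Millis(20));  // illustrative value
    pacer.EnsureStarted();
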
diff --git a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc
index 54347493e7..f0a9ad78c2 100644
--- a/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc
+++ b/third_party/libwebrtc/modules/pacing/task_queue_paced_sender_unittest.cc
@@ -11,6 +11,7 @@
#include "modules/pacing/task_queue_paced_sender.h"
#include <algorithm>
+#include <any>
#include <atomic>
#include <list>
#include <memory>
@@ -24,6 +25,7 @@
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
+#include "modules/pacing/pacing_controller.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "test/gmock.h"
@@ -33,6 +35,9 @@
using ::testing::_;
using ::testing::AtLeast;
+using ::testing::AtMost;
+using ::testing::Lt;
+using ::testing::NiceMock;
using ::testing::Return;
using ::testing::SaveArg;
@@ -167,9 +172,10 @@ TEST(TaskQueuePacedSenderTest, PacesPacketsWithBurst) {
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
PacingController::kMinSleepTime,
- TaskQueuePacedSender::kNoPacketHoldback,
- // Half a second of bursting.
- TimeDelta::Seconds(0.5));
+ TaskQueuePacedSender::kNoPacketHoldback);
+ pacer.SetSendBurstInterval(
+ // Half a second of bursting.
+ TimeDelta::Seconds(0.5));
// Insert a number of packets, covering one second.
static constexpr size_t kPacketsToSend = 42;
@@ -262,7 +268,7 @@ TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) {
TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) {
GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
+ NiceMock<MockPacketRouter> packet_router;
ScopedKeyValueConfig trials;
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
@@ -270,21 +276,16 @@ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) {
TaskQueuePacedSender::kNoPacketHoldback);
const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125);
- const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
- const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate;
pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
pacer.EnsureStarted();
- // Add some initial video packets, only one should be sent.
- EXPECT_CALL(packet_router, SendPacket);
+ // Add some initial video packets. Not all should be sent immediately.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtMost(9));
pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
time_controller.AdvanceTime(TimeDelta::Zero());
::testing::Mock::VerifyAndClearExpectations(&packet_router);
- // Advance time, but still before next packet should be sent.
- time_controller.AdvanceTime(kPacketPacingTime / 2);
-
// Insert an audio packet, it should be sent immediately.
EXPECT_CALL(packet_router, SendPacket);
pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1));
@@ -295,12 +296,13 @@ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) {
TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) {
const TimeDelta kCoalescingWindow = TimeDelta::Millis(5);
GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
+ NiceMock<MockPacketRouter> packet_router;
ScopedKeyValueConfig trials;
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
kCoalescingWindow,
TaskQueuePacedSender::kNoPacketHoldback);
+ pacer.SetSendBurstInterval(TimeDelta::Zero());
// Set rates so one packet adds one ms of buffer level.
const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
@@ -310,9 +312,9 @@ TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) {
pacer.SetPacingRates(kPacingDataRate, DataRate::Zero());
pacer.EnsureStarted();
- // Add 10 packets. The first should be sent immediately since the buffers
- // are clear.
- EXPECT_CALL(packet_router, SendPacket);
+ // Add 10 packets. The first burst should be sent immediately since the
+ // buffers are clear.
+ EXPECT_CALL(packet_router, SendPacket).Times(AtMost(9));
pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10));
time_controller.AdvanceTime(TimeDelta::Zero());
::testing::Mock::VerifyAndClearExpectations(&packet_router);
@@ -370,11 +372,12 @@ TEST(TaskQueuePacedSenderTest, SchedulesProbeAtSentTime) {
ScopedKeyValueConfig trials(
"WebRTC-Bwe-ProbingBehavior/min_probe_delta:1ms/");
GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
+ NiceMock<MockPacketRouter> packet_router;
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
PacingController::kMinSleepTime,
TaskQueuePacedSender::kNoPacketHoldback);
+ pacer.SetSendBurstInterval(TimeDelta::Zero());
// Set rates so one packet adds 4ms of buffer level.
const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
@@ -504,11 +507,12 @@ TEST(TaskQueuePacedSenderTest, PacketBasedCoalescing) {
const int kPacketBasedHoldback = 5;
GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234));
- MockPacketRouter packet_router;
+ NiceMock<MockPacketRouter> packet_router;
ScopedKeyValueConfig trials;
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
kFixedCoalescingWindow, kPacketBasedHoldback);
+ pacer.SetSendBurstInterval(TimeDelta::Zero());
// Set rates so one packet adds one ms of buffer level.
const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
@@ -559,6 +563,7 @@ TEST(TaskQueuePacedSenderTest, FixedHoldBackHasPriorityOverPackets) {
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
kFixedCoalescingWindow, kPacketBasedHoldback);
+ pacer.SetSendBurstInterval(TimeDelta::Zero());
// Set rates so one packet adds one ms of buffer level.
const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize);
@@ -691,7 +696,7 @@ TEST(TaskQueuePacedSenderTest, PostedPacketsNotSendFromRemovePacketsForSsrc) {
TEST(TaskQueuePacedSenderTest, Stats) {
static constexpr Timestamp kStartTime = Timestamp::Millis(1234);
GlobalSimulatedTimeController time_controller(kStartTime);
- MockPacketRouter packet_router;
+ NiceMock<MockPacketRouter> packet_router;
ScopedKeyValueConfig trials;
TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials,
@@ -708,7 +713,8 @@ TEST(TaskQueuePacedSenderTest, Stats) {
// Allowed `QueueSizeData` and `ExpectedQueueTime` deviation.
static constexpr size_t kAllowedPacketsDeviation = 1;
static constexpr DataSize kAllowedQueueSizeDeviation =
- DataSize::Bytes(kDefaultPacketSize * kAllowedPacketsDeviation);
+ DataSize::Bytes(kDefaultPacketSize * kAllowedPacketsDeviation) +
+ kPacingRate * PacingController::kDefaultBurstInterval;
static constexpr TimeDelta kAllowedQueueTimeDeviation =
kAllowedQueueSizeDeviation / kPacingRate;
diff --git a/third_party/libwebrtc/modules/portal/pipewire_utils.h b/third_party/libwebrtc/modules/portal/pipewire_utils.h
index 8344a8cefb..c1327b85c9 100644
--- a/third_party/libwebrtc/modules/portal/pipewire_utils.h
+++ b/third_party/libwebrtc/modules/portal/pipewire_utils.h
@@ -11,6 +11,21 @@
#ifndef MODULES_PORTAL_PIPEWIRE_UTILS_H_
#define MODULES_PORTAL_PIPEWIRE_UTILS_H_
+#include <errno.h>
+#include <stdint.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+
+// static
+struct dma_buf_sync {
+ uint64_t flags;
+};
+#define DMA_BUF_SYNC_READ (1 << 0)
+#define DMA_BUF_SYNC_START (0 << 2)
+#define DMA_BUF_SYNC_END (1 << 2)
+#define DMA_BUF_BASE 'b'
+#define DMA_BUF_IOCTL_SYNC _IOW(DMA_BUF_BASE, 0, struct dma_buf_sync)
+
struct pw_thread_loop;
namespace webrtc {
@@ -32,6 +47,66 @@ class PipeWireThreadLoopLock {
pw_thread_loop* const loop_;
};
+// We should synchronize DMA Buffer object access from CPU to avoid potential
+// cache incoherency and data loss.
+// See
+// https://01.org/linuxgraphics/gfx-docs/drm/driver-api/dma-buf.html#cpu-access-to-dma-buffer-objects
+static bool SyncDmaBuf(int fd, uint64_t start_or_end) {
+ struct dma_buf_sync sync = {0};
+
+ sync.flags = start_or_end | DMA_BUF_SYNC_READ;
+
+ while (true) {
+ int ret;
+ ret = ioctl(fd, DMA_BUF_IOCTL_SYNC, &sync);
+ if (ret == -1 && errno == EINTR) {
+ continue;
+ } else if (ret == -1) {
+ return false;
+ } else {
+ break;
+ }
+ }
+
+ return true;
+}
+
+class ScopedBuf {
+ public:
+ ScopedBuf() {}
+ ScopedBuf(uint8_t* map, int map_size, int fd, bool is_dma_buf = false)
+ : map_(map), map_size_(map_size), fd_(fd), is_dma_buf_(is_dma_buf) {}
+ ~ScopedBuf() {
+ if (map_ != MAP_FAILED) {
+ if (is_dma_buf_) {
+ SyncDmaBuf(fd_, DMA_BUF_SYNC_END);
+ }
+ munmap(map_, map_size_);
+ }
+ }
+
+ explicit operator bool() { return map_ != MAP_FAILED; }
+
+ void initialize(uint8_t* map, int map_size, int fd, bool is_dma_buf = false) {
+ map_ = map;
+ map_size_ = map_size;
+ is_dma_buf_ = is_dma_buf;
+ fd_ = fd;
+
+ if (is_dma_buf_) {
+ SyncDmaBuf(fd_, DMA_BUF_SYNC_START);
+ }
+ }
+
+ uint8_t* get() { return map_; }
+
+ protected:
+ uint8_t* map_ = static_cast<uint8_t*>(MAP_FAILED);
+ int map_size_;
+ int fd_;
+ bool is_dma_buf_;
+};
+
} // namespace webrtc
#endif // MODULES_PORTAL_PIPEWIRE_UTILS_H_
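
As a rough illustration of how the helpers above fit together (the `fd` and `size` values are assumed to come from a PipeWire dma-buf plane, which this header does not provide):

    uint8_t* map = static_cast<uint8_t*>(
        mmap(nullptr, size, PROT_READ, MAP_SHARED, fd, /*offset=*/0));
    webrtc::ScopedBuf buffer;
    if (map != MAP_FAILED) {
      // initialize() issues DMA_BUF_SYNC_START for dma-bufs; the destructor
      // issues DMA_BUF_SYNC_END and munmap()s the mapping.
      buffer.initialize(map, size, fd, /*is_dma_buf=*/true);
    }
    if (buffer) {
      // Read the frame data through buffer.get() while the sync is active.
    }
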
diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
index 97fa490adf..c9edc4f551 100644
--- a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
+++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control.h
@@ -108,7 +108,7 @@ class AimdRateControl {
// If "Disabled", estimated link capacity is not used as upper bound.
FieldTrialFlag disable_estimate_bounded_increase_{"Disabled"};
FieldTrialParameter<bool> use_current_estimate_as_min_upper_bound_{"c_upper",
- false};
+ true};
absl::optional<DataRate> last_decrease_;
};
} // namespace webrtc
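
Because the `c_upper` default flips to true here, keeping the previous behaviour now requires an explicit field-trial override; a sketch that mirrors the construction used in the updated tests below:

    AimdRateControl aimd_rate_control(
        ExplicitKeyValueConfig(
            "WebRTC-Bwe-EstimateBoundedIncrease/c_upper:false/"),
        /*send_side=*/true);
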
diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
index f26afe995c..401e87e310 100644
--- a/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
+++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc
@@ -208,6 +208,7 @@ TEST(AimdRateControlTest, SetEstimateIncreaseBweInAlr) {
TEST(AimdRateControlTest, SetEstimateUpperLimitedByNetworkEstimate) {
AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""),
/*send_side=*/true);
+ aimd_rate_control.SetEstimate(DataRate::BitsPerSec(300'000), kInitialTime);
NetworkStateEstimate network_estimate;
network_estimate.link_capacity_upper = DataRate::BitsPerSec(400'000);
aimd_rate_control.SetNetworkStateEstimate(network_estimate);
@@ -217,11 +218,9 @@ TEST(AimdRateControlTest, SetEstimateUpperLimitedByNetworkEstimate) {
}
TEST(AimdRateControlTest,
- SetEstimateUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) {
- AimdRateControl aimd_rate_control(
- ExplicitKeyValueConfig(
- "WebRTC-Bwe-EstimateBoundedIncrease/c_upper:true/"),
- /*send_side=*/true);
+ SetEstimateDefaultUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) {
+ AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""),
+ /*send_side=*/true);
aimd_rate_control.SetEstimate(DataRate::BitsPerSec(500'000), kInitialTime);
ASSERT_EQ(aimd_rate_control.LatestEstimate(), DataRate::BitsPerSec(500'000));
@@ -233,9 +232,12 @@ TEST(AimdRateControlTest,
}
TEST(AimdRateControlTest,
- SetEstimateDefaultNotUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) {
- AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""),
- /*send_side=*/true);
+ SetEstimateNotUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) {
+ AimdRateControl aimd_rate_control(
+ ExplicitKeyValueConfig(
+ "WebRTC-Bwe-EstimateBoundedIncrease/c_upper:false/"),
+ /*send_side=*/true);
+
aimd_rate_control.SetEstimate(DataRate::BitsPerSec(500'000), kInitialTime);
ASSERT_EQ(aimd_rate_control.LatestEstimate(), DataRate::BitsPerSec(500'000));
diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_gn/moz.build b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_gn/moz.build
index 2876755e91..45104d15ca 100644
--- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_gn/moz.build
@@ -211,7 +211,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -221,10 +220,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn
index 0fc9931f39..b471c2fa76 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn
+++ b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn
@@ -258,6 +258,13 @@ rtc_library("rtp_rtcp") {
"source/video_rtp_depacketizer_vp9.h",
]
+ if (rtc_use_h265) {
+ sources += [
+ "source/rtp_packetizer_h265.cc",
+ "source/rtp_packetizer_h265.h",
+ ]
+ }
+
if (rtc_enable_bwe_test_logging) {
defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ]
} else {
@@ -624,6 +631,10 @@ if (rtc_include_tests) {
"source/video_rtp_depacketizer_vp8_unittest.cc",
"source/video_rtp_depacketizer_vp9_unittest.cc",
]
+ if (rtc_use_h265) {
+ sources += [ "source/rtp_packetizer_h265_unittest.cc" ]
+ }
+
deps = [
":fec_test_helper",
":frame_transformer_factory_unittest",
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/leb128_gn/moz.build b/third_party/libwebrtc/modules/rtp_rtcp/leb128_gn/moz.build
index 88f2cb22e0..e42ea18507 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/leb128_gn/moz.build
+++ b/third_party/libwebrtc/modules/rtp_rtcp/leb128_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_format_gn/moz.build b/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_format_gn/moz.build
index da304ae5a4..33d8799fb2 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_format_gn/moz.build
+++ b/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_format_gn/moz.build
@@ -241,7 +241,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -251,10 +250,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_gn/moz.build b/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_gn/moz.build
index 382194837b..8c49736436 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_gn/moz.build
+++ b/third_party/libwebrtc/modules/rtp_rtcp/rtp_rtcp_gn/moz.build
@@ -255,7 +255,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -265,10 +264,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build b/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build
index 2c8b5e2321..d2a102cfe3 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build
+++ b/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
index cfca7cb066..3e6d04d59c 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc
@@ -138,9 +138,9 @@ bool FlexfecHeaderReader::ReadFecHeader(
mask_part0 <<= 1;
ByteWriter<uint16_t>::WriteBigEndian(&data[byte_index], mask_part0);
byte_index += kFlexfecPacketMaskSizes[0];
- if (k_bit0) {
- // The first K-bit is set, and the packet mask is thus only 2 bytes long.
- // We have finished reading the properties for current ssrc.
+ if (!k_bit0) {
+ // The first K-bit is clear, and the packet mask is thus only 2 bytes
+ // long. We have finished reading the properties for current ssrc.
fec_packet->protected_streams[i].packet_mask_size =
kFlexfecPacketMaskSizes[0];
} else {
@@ -162,8 +162,8 @@ bool FlexfecHeaderReader::ReadFecHeader(
mask_part1 <<= 2;
ByteWriter<uint32_t>::WriteBigEndian(&data[byte_index], mask_part1);
byte_index += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0];
- if (k_bit1) {
- // The first K-bit is clear, but the second K-bit is set. The packet
+ if (!k_bit1) {
+ // The first K-bit is set, but the second K-bit is clear. The packet
// mask is thus 6 bytes long. We have finished reading the properties
// for current ssrc.
fec_packet->protected_streams[i].packet_mask_size =
@@ -273,8 +273,9 @@ void FlexfecHeaderWriter::FinalizeFecHeader(
tmp_mask_part0 >>= 1; // Shift, thus clearing K-bit 0.
ByteWriter<uint16_t>::WriteBigEndian(write_at, tmp_mask_part0);
+ *write_at |= 0x80; // Set K-bit 0.
write_at += kFlexfecPacketMaskSizes[0];
- tmp_mask_part1 >>= 2; // Shift, thus clearing K-bit 1 and bit 15.
+ tmp_mask_part1 >>= 2; // Shift twice, thus clearing K-bit 1 and bit 15.
ByteWriter<uint32_t>::WriteBigEndian(write_at, tmp_mask_part1);
bool bit15 = (protected_stream.packet_mask[1] & 0x01) != 0;
@@ -284,9 +285,9 @@ void FlexfecHeaderWriter::FinalizeFecHeader(
bool bit46 = (protected_stream.packet_mask[5] & 0x02) != 0;
bool bit47 = (protected_stream.packet_mask[5] & 0x01) != 0;
if (!bit46 && !bit47) {
- *write_at |= 0x80; // Set K-bit 1.
write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0];
} else {
+ *write_at |= 0x80; // Set K-bit 1.
write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0];
// Clear all trailing bits.
memset(write_at, 0,
@@ -307,14 +308,13 @@ void FlexfecHeaderWriter::FinalizeFecHeader(
ByteWriter<uint16_t>::WriteBigEndian(write_at, tmp_mask_part0);
bool bit15 = (protected_stream.packet_mask[1] & 0x01) != 0;
if (!bit15) {
- *write_at |= 0x80; // Set K-bit 0.
write_at += kFlexfecPacketMaskSizes[0];
} else {
+ *write_at |= 0x80; // Set K-bit 0.
write_at += kFlexfecPacketMaskSizes[0];
// Clear all trailing bits.
memset(write_at, 0U,
kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]);
- *write_at |= 0x80; // Set K-bit 1.
*write_at |= 0x40; // Set bit 15.
write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0];
}
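
The net effect of the reader changes is that a set K-bit now means the packet mask continues into the next block instead of ending there. A sketch of the resulting size decision (the helper name is illustrative, not part of the file; the indices refer to the kFlexfecPacketMaskSizes table already used above):

    size_t PacketMaskSize(bool k_bit0, bool k_bit1) {
      if (!k_bit0)
        return kFlexfecPacketMaskSizes[0];  // K-bit 0 clear: 2-byte mask.
      if (!k_bit1)
        return kFlexfecPacketMaskSizes[1];  // K-bit 1 clear: 6-byte mask.
      return kFlexfecPacketMaskSizes[2];    // Both K-bits set: full-size mask.
    }
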
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc
index 6995ba3871..f25e0d8d2a 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc
@@ -36,11 +36,12 @@ using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket;
using ::testing::Each;
using ::testing::ElementsAreArray;
-constexpr uint8_t kMask0[] = {0xAB, 0xCD}; // First K bit is set.
-constexpr uint8_t kMask1[] = {0x12, 0x34, // First K bit cleared.
- 0xF6, 0x78, 0x9A, 0xBC}; // Second K bit set.
-constexpr uint8_t kMask2[] = {0x12, 0x34, // First K bit cleared.
- 0x56, 0x78, 0x9A, 0xBC, // Second K bit cleared.
+constexpr uint8_t kKBit = 1 << 7;
+constexpr uint8_t kMask0[] = {0x2B, 0xCD}; // First K bit is cleared.
+constexpr uint8_t kMask1[] = {0x92, 0x34, // First K bit set.
+ 0x76, 0x78, 0x9A, 0xBC}; // Second K bit cleared.
+constexpr uint8_t kMask2[] = {0x92, 0x34, // First K bit set.
+ 0xD6, 0x78, 0x9A, 0xBC, // Second K bit set.
0xDE, 0xF0, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC};
constexpr size_t kMediaPacketLength = 1234;
@@ -186,11 +187,10 @@ void VerifyWrittenAndReadHeaders(
} // namespace
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0SetSingleStream) {
- constexpr uint8_t kKBit0 = 1 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0ClearSingleStream) {
constexpr size_t kExpectedFecHeaderSize = 12;
constexpr uint16_t kSnBase = 0x0102;
- constexpr uint8_t kFlexfecPktMask[] = {kKBit0 | 0x08, 0x81};
+ constexpr uint8_t kFlexfecPktMask[] = {0x08, 0x81};
constexpr uint8_t kUlpfecPacketMask[] = {0x11, 0x02};
constexpr uint8_t kPacketData[] = {
kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1],
@@ -215,13 +215,11 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0SetSingleStream) {
VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected);
}
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1SetSingleStream) {
- constexpr uint8_t kKBit0 = 0 << 7;
- constexpr uint8_t kKBit1 = 1 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1ClearSingleStream) {
constexpr size_t kExpectedFecHeaderSize = 16;
constexpr uint16_t kSnBase = 0x0102;
- constexpr uint8_t kFlexfecPktMask[] = {kKBit0 | 0x48, 0x81, //
- kKBit1 | 0x02, 0x11, 0x00, 0x21};
+ constexpr uint8_t kFlexfecPktMask[] = {kKBit | 0x48, 0x81, //
+ 0x02, 0x11, 0x00, 0x21};
constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, //
0x08, 0x44, 0x00, 0x84};
constexpr uint8_t kPacketData[] = {
@@ -250,15 +248,13 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1SetSingleStream) {
VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected);
}
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSetSingleStream) {
- constexpr uint8_t kKBit0 = 0 << 7;
- constexpr uint8_t kKBit1 = 0 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithBothKBitsSetSingleStream) {
constexpr size_t kExpectedFecHeaderSize = 24;
constexpr uint16_t kSnBase = 0x0102;
- constexpr uint8_t kFlexfecPacketMask[] = {kKBit0 | 0x48, 0x81, //
- kKBit1 | 0x02, 0x11, 0x00, 0x21, //
- 0x01, 0x11, 0x11, 0x11,
- 0x11, 0x11, 0x11, 0x11};
+ constexpr uint8_t kFlexfecPacketMask[] = {kKBit | 0x48, 0x81, //
+ kKBit | 0x02, 0x11, 0x00, 0x21, //
+ 0x01, 0x11, 0x11, 0x11,
+ 0x11, 0x11, 0x11, 0x11};
constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, //
0x08, 0x44, 0x00, 0x84, //
0x04, 0x44, 0x44, 0x44,
@@ -309,14 +305,13 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSetSingleStream) {
VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected);
}
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Set2Streams) {
- constexpr uint8_t kKBit0 = 1 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Clear2Streams) {
constexpr size_t kExpectedFecHeaderSize = 16;
constexpr uint16_t kSnBase0 = 0x0102;
constexpr uint16_t kSnBase1 = 0x0304;
- constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x08, 0x81};
+ constexpr uint8_t kFlexfecPktMask1[] = {0x08, 0x81};
constexpr uint8_t kUlpfecPacketMask1[] = {0x11, 0x02};
- constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x04, 0x41};
+ constexpr uint8_t kFlexfecPktMask2[] = {0x04, 0x41};
constexpr uint8_t kUlpfecPacketMask2[] = {0x08, 0x82};
constexpr uint8_t kPacketData[] = {
@@ -349,18 +344,16 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Set2Streams) {
VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected);
}
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Set2Streams) {
- constexpr uint8_t kKBit0 = 0 << 7;
- constexpr uint8_t kKBit1 = 1 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Clear2Streams) {
constexpr size_t kExpectedFecHeaderSize = 24;
constexpr uint16_t kSnBase0 = 0x0102;
constexpr uint16_t kSnBase1 = 0x0304;
- constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x48, 0x81, //
- kKBit1 | 0x02, 0x11, 0x00, 0x21};
+ constexpr uint8_t kFlexfecPktMask1[] = {kKBit | 0x48, 0x81, //
+ 0x02, 0x11, 0x00, 0x21};
constexpr uint8_t kUlpfecPacketMask1[] = {0x91, 0x02, //
0x08, 0x44, 0x00, 0x84};
- constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x57, 0x82, //
- kKBit1 | 0x04, 0x33, 0x00, 0x51};
+ constexpr uint8_t kFlexfecPktMask2[] = {kKBit | 0x57, 0x82, //
+ 0x04, 0x33, 0x00, 0x51};
constexpr uint8_t kUlpfecPacketMask2[] = {0xAF, 0x04, //
0x10, 0xCC, 0x01, 0x44};
constexpr uint8_t kPacketData[] = {
@@ -398,24 +391,22 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Set2Streams) {
VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected);
}
-TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSet2Streams) {
- constexpr uint8_t kKBit0 = 0 << 7;
- constexpr uint8_t kKBit1 = 0 << 7;
+TEST(FlexfecHeaderReaderTest, ReadsHeaderWithBothKBitsSet2Streams) {
constexpr size_t kExpectedFecHeaderSize = 40;
constexpr uint16_t kSnBase0 = 0x0102;
constexpr uint16_t kSnBase1 = 0x0304;
- constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x48, 0x81, //
- kKBit1 | 0x02, 0x11, 0x00, 0x21, //
- 0x01, 0x11, 0x11, 0x11,
- 0x11, 0x11, 0x11, 0x11};
+ constexpr uint8_t kFlexfecPktMask1[] = {kKBit | 0x48, 0x81, //
+ kKBit | 0x02, 0x11, 0x00, 0x21, //
+ 0x01, 0x11, 0x11, 0x11,
+ 0x11, 0x11, 0x11, 0x11};
constexpr uint8_t kUlpfecPacketMask1[] = {0x91, 0x02, //
0x08, 0x44, 0x00, 0x84, //
0x04, 0x44, 0x44, 0x44,
0x44, 0x44, 0x44, 0x44};
- constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x32, 0x84, //
- kKBit1 | 0x05, 0x23, 0x00, 0x55, //
- 0xA3, 0x22, 0x22, 0x22,
- 0x22, 0x22, 0x22, 0x35};
+ constexpr uint8_t kFlexfecPktMask2[] = {kKBit | 0x32, 0x84, //
+ kKBit | 0x05, 0x23, 0x00, 0x55, //
+ 0xA3, 0x22, 0x22, 0x22,
+ 0x22, 0x22, 0x22, 0x35};
constexpr uint8_t kUlpfecPacketMask2[] = {0x65, 0x08, //
0x14, 0x8C, 0x01, 0x56, //
0x8C, 0x88, 0x88, 0x88,
@@ -490,29 +481,27 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSet2Streams) {
}
TEST(FlexfecHeaderReaderTest, ReadsHeaderWithMultipleStreamsMultipleMasks) {
- constexpr uint8_t kBit0 = 0 << 7;
- constexpr uint8_t kBit1 = 1 << 7;
constexpr size_t kExpectedFecHeaderSize = 44;
constexpr uint16_t kSnBase0 = 0x0102;
constexpr uint16_t kSnBase1 = 0x0304;
constexpr uint16_t kSnBase2 = 0x0506;
constexpr uint16_t kSnBase3 = 0x0708;
- constexpr uint8_t kFlexfecPacketMask1[] = {kBit1 | 0x29, 0x91};
+ constexpr uint8_t kFlexfecPacketMask1[] = {0x29, 0x91};
constexpr uint8_t kUlpfecPacketMask1[] = {0x53, 0x22};
- constexpr uint8_t kFlexfecPacketMask2[] = {kBit0 | 0x32, 0xA1, //
- kBit1 | 0x02, 0x11, 0x00, 0x21};
+ constexpr uint8_t kFlexfecPacketMask2[] = {kKBit | 0x32, 0xA1, //
+ 0x02, 0x11, 0x00, 0x21};
constexpr uint8_t kUlpfecPacketMask2[] = {0x65, 0x42, //
0x08, 0x44, 0x00, 0x84};
- constexpr uint8_t kFlexfecPacketMask3[] = {kBit0 | 0x48, 0x81, //
- kBit0 | 0x02, 0x11, 0x00, 0x21, //
+ constexpr uint8_t kFlexfecPacketMask3[] = {kKBit | 0x48, 0x81, //
+ kKBit | 0x02, 0x11, 0x00, 0x21, //
0x01, 0x11, 0x11, 0x11,
0x11, 0x11, 0x11, 0x11};
constexpr uint8_t kUlpfecPacketMask3[] = {0x91, 0x02, //
0x08, 0x44, 0x00, 0x84, //
0x04, 0x44, 0x44, 0x44,
0x44, 0x44, 0x44, 0x44};
- constexpr uint8_t kFlexfecPacketMask4[] = {kBit0 | 0x32, 0x84, //
- kBit1 | 0x05, 0x23, 0x00, 0x55};
+ constexpr uint8_t kFlexfecPacketMask4[] = {kKBit | 0x32, 0x84, //
+ 0x05, 0x23, 0x00, 0x55};
constexpr uint8_t kUlpfecPacketMask4[] = {0x65, 0x08, //
0x14, 0x8C, 0x01, 0x54};
constexpr uint8_t kPacketData[] = {kFlexible,
@@ -642,7 +631,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit0SetShouldFail) {
EXPECT_FALSE(reader.ReadFecHeader(&read_packet));
}
-TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) {
+TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1ClearShouldFail) {
// Simulate short received packet.
constexpr uint8_t kPacketData[] = {
kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1],
@@ -659,7 +648,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) {
EXPECT_FALSE(reader.ReadFecHeader(&read_packet));
}
-TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1ClearedShouldFail) {
+TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) {
// Simulate short received packet.
constexpr uint8_t kPacketData[] = {
kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1],
@@ -698,8 +687,8 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketMultipleStreamsShouldFail) {
EXPECT_FALSE(reader.ReadFecHeader(&read_packet));
}
-TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0SetSingleStream) {
- constexpr uint8_t kFlexfecPacketMask[] = {0x88, 0x81};
+TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0ClearSingleStream) {
+ constexpr uint8_t kFlexfecPacketMask[] = {0x08, 0x81};
constexpr uint8_t kUlpfecPacketMask[] = {0x11, 0x02};
constexpr uint16_t kMediaStartSeqNum = 1234;
Packet written_packet = WritePacket({{.ssrc = 0x01,
@@ -714,8 +703,8 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0SetSingleStream) {
VerifyFinalizedHeaders(written_packet, expected);
}
-TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1SetSingleStream) {
- constexpr uint8_t kFlexfecPacketMask[] = {0x48, 0x81, 0x82, 0x11, 0x00, 0x21};
+TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1ClearSingleStream) {
+ constexpr uint8_t kFlexfecPacketMask[] = {0xC8, 0x81, 0x02, 0x11, 0x00, 0x21};
constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, 0x08, 0x44, 0x00, 0x84};
constexpr uint16_t kMediaStartSeqNum = 1234;
Packet written_packet = WritePacket({{.ssrc = 0x01,
@@ -730,10 +719,10 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1SetSingleStream) {
VerifyFinalizedHeaders(written_packet, expected);
}
-TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithNoKBitsSetSingleStream) {
+TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithBothKBitsSetSingleStream) {
constexpr uint8_t kFlexfecPacketMask[] = {
- 0x11, 0x11, // K-bit 0 clear.
- 0x11, 0x11, 0x11, 0x10, // K-bit 1 clear.
+ 0x91, 0x11, // K-bit 0 set.
+ 0x91, 0x11, 0x11, 0x10, // K-bit 1 set.
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 //
};
constexpr uint8_t kUlpfecPacketMask[] = {0x22, 0x22, 0x44, 0x44, 0x44, 0x41};
@@ -752,22 +741,22 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithNoKBitsSetSingleStream) {
TEST(FlexfecHeaderWriterTest, FinalizesHeaderMultipleStreamsMultipleMasks) {
constexpr uint8_t kFlexfecPacketMask1[] = {
- 0x11, 0x11, // K-bit 0 clear.
- 0x11, 0x11, 0x11, 0x10, // K-bit 1 clear.
+ 0x91, 0x11, // K-bit 0 set.
+ 0x91, 0x11, 0x11, 0x10, // K-bit 1 set.
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 //
};
constexpr uint8_t kUlpfecPacketMask1[] = {0x22, 0x22, 0x44, 0x44, 0x44, 0x41};
constexpr uint16_t kMediaStartSeqNum1 = 1234;
- constexpr uint8_t kFlexfecPacketMask2[] = {0x88, 0x81};
+ constexpr uint8_t kFlexfecPacketMask2[] = {0x08, 0x81};
constexpr uint8_t kUlpfecPacketMask2[] = {0x11, 0x02};
constexpr uint16_t kMediaStartSeqNum2 = 2345;
- constexpr uint8_t kFlexfecPacketMask3[] = {0x48, 0x81, 0x82,
+ constexpr uint8_t kFlexfecPacketMask3[] = {0xC8, 0x81, 0x02,
0x11, 0x00, 0x21};
constexpr uint8_t kUlpfecPacketMask3[] = {0x91, 0x02, 0x08, 0x44, 0x00, 0x84};
constexpr uint16_t kMediaStartSeqNum3 = 3456;
constexpr uint8_t kFlexfecPacketMask4[] = {
- 0x55, 0xAA, // K-bit 0 clear.
- 0x22, 0xAB, 0xCD, 0xEF, // K-bit 1 clear.
+ 0xD5, 0xAA, // K-bit 0 set.
+ 0xA2, 0xAB, 0xCD, 0xEF, // K-bit 1 set.
0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 //
};
constexpr uint8_t kUlpfecPacketMask4[] = {0xAB, 0x54, 0x8A, 0xAF, 0x37, 0xBF};
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_format.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_format.cc
index 2c11a29bfa..c7534dee40 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_format.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_format.cc
@@ -22,6 +22,9 @@
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "rtc_base/checks.h"
+#ifdef RTC_ENABLE_H265
+#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h"
+#endif
namespace webrtc {
@@ -57,7 +60,11 @@ std::unique_ptr<RtpPacketizer> RtpPacketizer::Create(
return std::make_unique<RtpPacketizerAv1>(
payload, limits, rtp_video_header.frame_type,
rtp_video_header.is_last_frame_in_picture);
- // TODO(bugs.webrtc.org/13485): Implement RtpPacketizerH265.
+#ifdef RTC_ENABLE_H265
+ case kVideoCodecH265: {
+ return std::make_unique<RtpPacketizerH265>(payload, limits);
+ }
+#endif
default: {
return std::make_unique<RtpPacketizerGeneric>(payload, limits,
rtp_video_header);
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc
index 95dbaf364c..859b529a47 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_av1.cc
@@ -74,8 +74,7 @@ RtpPacketizerAv1::RtpPacketizerAv1(rtc::ArrayView<const uint8_t> payload,
std::vector<RtpPacketizerAv1::Obu> RtpPacketizerAv1::ParseObus(
rtc::ArrayView<const uint8_t> payload) {
std::vector<Obu> result;
- rtc::ByteBufferReader payload_reader(
- reinterpret_cast<const char*>(payload.data()), payload.size());
+ rtc::ByteBufferReader payload_reader(payload);
while (payload_reader.Length() > 0) {
Obu obu;
payload_reader.ReadUInt8(&obu.header);
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.cc
new file mode 100644
index 0000000000..313680cc87
--- /dev/null
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.cc
@@ -0,0 +1,350 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h"
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "common_video/h264/h264_common.h"
+#include "common_video/h265/h265_common.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+namespace {
+
+// The payload header consists of the same
+// fields (F, Type, LayerId and TID) as the NAL unit header. Refer to
+// section 4.2 in RFC 7798.
+constexpr size_t kH265PayloadHeaderSize = 2;
+// Unlike H.264, H265 NAL header is 2-bytes.
+constexpr size_t kH265NalHeaderSize = 2;
+// An H265 FU consists of a 2-byte payload header, a 1-byte FU header and the
+// FU payload.
+constexpr size_t kH265FuHeaderSize = 1;
+// In an H265 RTP aggregation packet, the NALU size field that precedes each
+// aggregated NAL unit is 2 bytes.
+constexpr size_t kH265LengthFieldSize = 2;
+
+enum H265NalHdrMasks {
+ kH265FBit = 0x80,
+ kH265TypeMask = 0x7E,
+ kH265LayerIDHMask = 0x1,
+ kH265LayerIDLMask = 0xF8,
+ kH265TIDMask = 0x7,
+ kH265TypeMaskN = 0x81,
+ kH265TypeMaskInFuHeader = 0x3F
+};
+
+// Bit masks for FU headers.
+enum H265FuBitmasks {
+ kH265SBitMask = 0x80,
+ kH265EBitMask = 0x40,
+ kH265FuTypeBitMask = 0x3F
+};
+
+} // namespace
+
+RtpPacketizerH265::RtpPacketizerH265(rtc::ArrayView<const uint8_t> payload,
+ PayloadSizeLimits limits)
+ : limits_(limits), num_packets_left_(0) {
+ for (const auto& nalu :
+ H264::FindNaluIndices(payload.data(), payload.size())) {
+ input_fragments_.push_back(
+ payload.subview(nalu.payload_start_offset, nalu.payload_size));
+ }
+
+ if (!GeneratePackets()) {
+ // If failed to generate all the packets, discard already generated
+ // packets in case the caller would ignore return value and still try to
+ // call NextPacket().
+ num_packets_left_ = 0;
+ while (!packets_.empty()) {
+ packets_.pop();
+ }
+ }
+}
+
+RtpPacketizerH265::~RtpPacketizerH265() = default;
+
+size_t RtpPacketizerH265::NumPackets() const {
+ return num_packets_left_;
+}
+
+bool RtpPacketizerH265::GeneratePackets() {
+ for (size_t i = 0; i < input_fragments_.size();) {
+ int fragment_len = input_fragments_[i].size();
+ int single_packet_capacity = limits_.max_payload_len;
+ if (input_fragments_.size() == 1) {
+ single_packet_capacity -= limits_.single_packet_reduction_len;
+ } else if (i == 0) {
+ single_packet_capacity -= limits_.first_packet_reduction_len;
+ } else if (i + 1 == input_fragments_.size()) {
+ // Pretend that last fragment is larger instead of making last packet
+ // smaller.
+ single_packet_capacity -= limits_.last_packet_reduction_len;
+ }
+ if (fragment_len > single_packet_capacity) {
+ if (!PacketizeFu(i)) {
+ return false;
+ }
+ ++i;
+ } else {
+ i = PacketizeAp(i);
+ }
+ }
+ return true;
+}
+
+bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) {
+ // Fragment payload into packets (FU).
+ // Strip out the original header and leave room for the FU header.
+ rtc::ArrayView<const uint8_t> fragment = input_fragments_[fragment_index];
+ PayloadSizeLimits limits = limits_;
+ // Per section 4.4.3 in RFC 7798, each FU fragment has a 2-byte payload
+ // header and a one-byte FU header. DONL is not supported, so ignore its
+ // size when calculating max_payload_len.
+ limits.max_payload_len -= kH265FuHeaderSize + kH265PayloadHeaderSize;
+
+ // Update single/first/last packet reductions unless it is single/first/last
+ // fragment.
+ if (input_fragments_.size() != 1) {
+ // if this fragment is put into a single packet, it might still be the
+ // first or the last packet in the whole sequence of packets.
+ if (fragment_index == input_fragments_.size() - 1) {
+ limits.single_packet_reduction_len = limits_.last_packet_reduction_len;
+ } else if (fragment_index == 0) {
+ limits.single_packet_reduction_len = limits_.first_packet_reduction_len;
+ } else {
+ limits.single_packet_reduction_len = 0;
+ }
+ }
+ if (fragment_index != 0) {
+ limits.first_packet_reduction_len = 0;
+ }
+ if (fragment_index != input_fragments_.size() - 1) {
+ limits.last_packet_reduction_len = 0;
+ }
+
+ // Strip out the original header.
+ size_t payload_left = fragment.size() - kH265NalHeaderSize;
+ int offset = kH265NalHeaderSize;
+
+ std::vector<int> payload_sizes = SplitAboutEqually(payload_left, limits);
+ if (payload_sizes.empty()) {
+ return false;
+ }
+
+ for (size_t i = 0; i < payload_sizes.size(); ++i) {
+ int packet_length = payload_sizes[i];
+ RTC_CHECK_GT(packet_length, 0);
+ uint16_t header = (fragment[0] << 8) | fragment[1];
+ packets_.push({.source_fragment = fragment.subview(offset, packet_length),
+ .first_fragment = (i == 0),
+ .last_fragment = (i == payload_sizes.size() - 1),
+ .aggregated = false,
+ .header = header});
+ offset += packet_length;
+ payload_left -= packet_length;
+ }
+ num_packets_left_ += payload_sizes.size();
+ RTC_CHECK_EQ(payload_left, 0);
+ return true;
+}
+
+int RtpPacketizerH265::PacketizeAp(size_t fragment_index) {
+ // Aggregate fragments into one packet.
+ size_t payload_size_left = limits_.max_payload_len;
+ if (input_fragments_.size() == 1) {
+ payload_size_left -= limits_.single_packet_reduction_len;
+ } else if (fragment_index == 0) {
+ payload_size_left -= limits_.first_packet_reduction_len;
+ }
+ int aggregated_fragments = 0;
+ size_t fragment_headers_length = 0;
+ rtc::ArrayView<const uint8_t> fragment = input_fragments_[fragment_index];
+ RTC_CHECK_GE(payload_size_left, fragment.size());
+ ++num_packets_left_;
+
+ auto payload_size_needed = [&] {
+ size_t fragment_size = fragment.size() + fragment_headers_length;
+ if (input_fragments_.size() == 1) {
+ // Single fragment, single packet, payload_size_left already adjusted
+ // with limits_.single_packet_reduction_len.
+ return fragment_size;
+ }
+ if (fragment_index == input_fragments_.size() - 1) {
+ // Last fragment, so this might be the last packet.
+ return fragment_size + limits_.last_packet_reduction_len;
+ }
+ return fragment_size;
+ };
+
+ while (payload_size_left >= payload_size_needed()) {
+ RTC_CHECK_GT(fragment.size(), 0);
+ packets_.push({.source_fragment = fragment,
+ .first_fragment = (aggregated_fragments == 0),
+ .last_fragment = false,
+ .aggregated = true,
+ .header = fragment[0]});
+ payload_size_left -= fragment.size();
+ payload_size_left -= fragment_headers_length;
+
+ fragment_headers_length = kH265LengthFieldSize;
+ // If we are going to try to aggregate more fragments into this packet
+ // we need to add the AP NALU header and a length field for the first
+ // NALU of this packet.
+ if (aggregated_fragments == 0) {
+ fragment_headers_length += kH265PayloadHeaderSize + kH265LengthFieldSize;
+ }
+ ++aggregated_fragments;
+
+ // Next fragment.
+ ++fragment_index;
+ if (fragment_index == input_fragments_.size()) {
+ break;
+ }
+ fragment = input_fragments_[fragment_index];
+ }
+ RTC_CHECK_GT(aggregated_fragments, 0);
+ packets_.back().last_fragment = true;
+ return fragment_index;
+}
+
+bool RtpPacketizerH265::NextPacket(RtpPacketToSend* rtp_packet) {
+ RTC_DCHECK(rtp_packet);
+
+ if (packets_.empty()) {
+ return false;
+ }
+
+ PacketUnit packet = packets_.front();
+
+ if (packet.first_fragment && packet.last_fragment) {
+ // Single NAL unit packet. DONL is not supported for single NAL unit
+ // packets, so the DONL field is not present.
+ size_t bytes_to_send = packet.source_fragment.size();
+ uint8_t* buffer = rtp_packet->AllocatePayload(bytes_to_send);
+ memcpy(buffer, packet.source_fragment.data(), bytes_to_send);
+ packets_.pop();
+ input_fragments_.pop_front();
+ } else if (packet.aggregated) {
+ NextAggregatePacket(rtp_packet);
+ } else {
+ NextFragmentPacket(rtp_packet);
+ }
+ rtp_packet->SetMarker(packets_.empty());
+ --num_packets_left_;
+ return true;
+}
+
+void RtpPacketizerH265::NextAggregatePacket(RtpPacketToSend* rtp_packet) {
+ size_t payload_capacity = rtp_packet->FreeCapacity();
+ RTC_CHECK_GE(payload_capacity, kH265PayloadHeaderSize);
+ uint8_t* buffer = rtp_packet->AllocatePayload(payload_capacity);
+ RTC_CHECK(buffer);
+ PacketUnit* packet = &packets_.front();
+ RTC_CHECK(packet->first_fragment);
+
+ /*
+ +---------------+---------------+
+ |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |F| Type | LayerId | TID |
+ +-------------+-----------------+
+ */
+ // Refer to section 4.4.2 of RFC 7798 for aggregation packets and modify the
+ // type to 48 in PayloadHdr for the aggregation packet. DONL is not
+ // supported, so the DONL field is not present.
+ uint8_t payload_hdr_h = packet->header >> 8;
+ uint8_t payload_hdr_l = packet->header & 0xFF;
+ uint8_t layer_id_h = payload_hdr_h & kH265LayerIDHMask;
+ payload_hdr_h = (payload_hdr_h & kH265TypeMaskN) |
+ (H265::NaluType::kAp << 1) | layer_id_h;
+ buffer[0] = payload_hdr_h;
+ buffer[1] = payload_hdr_l;
+
+ int index = kH265PayloadHeaderSize;
+ bool is_last_fragment = packet->last_fragment;
+ while (packet->aggregated) {
+ // Add NAL unit length field.
+ rtc::ArrayView<const uint8_t> fragment = packet->source_fragment;
+ ByteWriter<uint16_t>::WriteBigEndian(&buffer[index], fragment.size());
+ index += kH265LengthFieldSize;
+ // Add NAL unit.
+ memcpy(&buffer[index], fragment.data(), fragment.size());
+ index += fragment.size();
+ packets_.pop();
+ input_fragments_.pop_front();
+ if (is_last_fragment) {
+ break;
+ }
+ packet = &packets_.front();
+ is_last_fragment = packet->last_fragment;
+ }
+ RTC_CHECK(is_last_fragment);
+ rtp_packet->SetPayloadSize(index);
+}
+
+void RtpPacketizerH265::NextFragmentPacket(RtpPacketToSend* rtp_packet) {
+ PacketUnit* packet = &packets_.front();
+ // NAL unit fragmented over multiple packets (FU).
+ // We do not send original NALU header, so it will be replaced by the
+ // PayloadHdr of the first packet.
+ /*
+ +---------------+---------------+
+ |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |F| Type | LayerId | TID |
+ +-------------+-----------------+
+ */
+ // Refer to section 4.4.3 of RFC 7798 for fragmentation units and modify the
+ // type to 49 in PayloadHdr for the FU packet.
+ uint8_t payload_hdr_h =
+ packet->header >> 8; // 1-bit F, 6-bit type, 1-bit layerID highest-bit
+ uint8_t payload_hdr_l = packet->header & 0xFF;
+ uint8_t layer_id_h = payload_hdr_h & kH265LayerIDHMask;
+ uint8_t fu_header = 0;
+ /*
+ +---------------+
+ |0|1|2|3|4|5|6|7|
+ +-+-+-+-+-+-+-+-+
+ |S|E| FuType |
+ +---------------+
+ */
+ // S bit indicates the start of a fragmented NAL unit.
+ // E bit indicates the end of a fragmented NAL unit.
+ // FuType must be equal to the field type value of the fragmented NAL unit.
+ fu_header |= (packet->first_fragment ? kH265SBitMask : 0);
+ fu_header |= (packet->last_fragment ? kH265EBitMask : 0);
+ uint8_t type = (payload_hdr_h & kH265TypeMask) >> 1;
+ fu_header |= type;
+ // Now update payload_hdr_h with FU type.
+ payload_hdr_h = (payload_hdr_h & kH265TypeMaskN) |
+ (H265::NaluType::kFu << 1) | layer_id_h;
+ rtc::ArrayView<const uint8_t> fragment = packet->source_fragment;
+ uint8_t* buffer = rtp_packet->AllocatePayload(
+ kH265FuHeaderSize + kH265PayloadHeaderSize + fragment.size());
+ RTC_CHECK(buffer);
+ buffer[0] = payload_hdr_h;
+ buffer[1] = payload_hdr_l;
+ buffer[2] = fu_header;
+
+ // DONL is not supported for fragmentation units, so the DONL field is not
+ // present.
+ memcpy(buffer + kH265FuHeaderSize + kH265PayloadHeaderSize, fragment.data(),
+ fragment.size());
+ if (packet->last_fragment) {
+ input_fragments_.pop_front();
+ }
+ packets_.pop();
+}
+
+} // namespace webrtc
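
To make the header rewrite concrete, a worked example for the TRAIL_R unit (header bytes 0x02 0x02) that the unit tests below generate; the numeric constants restate the masks defined earlier in this file:

    const uint16_t nal_header = 0x0202;        // F=0, Type=1 (TRAIL_R),
                                               // nuh_temporal_id_plus1=2.
    uint8_t payload_hdr_h = nal_header >> 8;   // 0x02
    const uint8_t type = (payload_hdr_h & 0x7E) >> 1;    // 1 (kH265TypeMask)
    const uint8_t fu_header = 0x80 | type;               // 0x81: S=1, E=0, FuType=1
    payload_hdr_h = (payload_hdr_h & 0x81) | (49 << 1);  // 0x62: type becomes FU (49)
    // The first FU packet therefore starts with 0x62, 0x02, 0x81, followed by
    // the fragment payload; the original 2-byte NAL header is not copied.
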
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.h
new file mode 100644
index 0000000000..95442f795c
--- /dev/null
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_
+#define MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_
+
+#include <deque>
+#include <queue>
+#include <string>
+
+#include "api/array_view.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+
+namespace webrtc {
+
+class RtpPacketizerH265 : public RtpPacketizer {
+ public:
+ // Initialize with payload from encoder.
+ // The payload_data must be exactly one encoded H.265 frame.
+ // For H265 we only support tx-mode SRST.
+ RtpPacketizerH265(rtc::ArrayView<const uint8_t> payload,
+ PayloadSizeLimits limits);
+
+ RtpPacketizerH265(const RtpPacketizerH265&) = delete;
+ RtpPacketizerH265& operator=(const RtpPacketizerH265&) = delete;
+
+ ~RtpPacketizerH265() override;
+
+ size_t NumPackets() const override;
+
+ // Get the next payload with H.265 payload header.
+ // Write payload and set marker bit of the `packet`.
+ // Returns true on success or false if there was no payload to packetize.
+ bool NextPacket(RtpPacketToSend* rtp_packet) override;
+
+ private:
+ struct PacketUnit {
+ rtc::ArrayView<const uint8_t> source_fragment;
+ bool first_fragment = false;
+ bool last_fragment = false;
+ bool aggregated = false;
+ uint16_t header = 0;
+ };
+ std::deque<rtc::ArrayView<const uint8_t>> input_fragments_;
+ std::queue<PacketUnit> packets_;
+
+ bool GeneratePackets();
+ bool PacketizeFu(size_t fragment_index);
+ int PacketizeAp(size_t fragment_index);
+
+ void NextAggregatePacket(RtpPacketToSend* rtp_packet);
+ void NextFragmentPacket(RtpPacketToSend* rtp_packet);
+
+ const PayloadSizeLimits limits_;
+ size_t num_packets_left_ = 0;
+};
+} // namespace webrtc
+#endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_
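
A minimal sketch of driving the packetizer, assuming `frame` holds one encoded H.265 access unit with Annex-B start codes and `limits` comes from the RTP sender:

    RtpPacketizerH265 packetizer(frame, limits);
    RtpPacketToSend packet(/*extensions=*/nullptr);
    while (packetizer.NextPacket(&packet)) {
      // Payload and marker bit are already set on `packet`; hand it to the
      // RTP sender here.
    }
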
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc
new file mode 100644
index 0000000000..cb1de334c0
--- /dev/null
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc
@@ -0,0 +1,525 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h"
+
+#include <vector>
+
+#include "common_video/h265/h265_common.h"
+#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/byte_io.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Each;
+using ::testing::ElementsAre;
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+constexpr RtpPacketToSend::ExtensionManager* kNoExtensions = nullptr;
+constexpr size_t kMaxPayloadSize = 1200;
+constexpr size_t kLengthFieldLength = 2;
+constexpr RtpPacketizer::PayloadSizeLimits kNoLimits;
+
+constexpr size_t kNalHeaderSize = 2;
+constexpr size_t kFuHeaderSize = 3;
+
+constexpr uint8_t kNaluTypeMask = 0x7E;
+
+// Bit masks for FU headers.
+constexpr uint8_t kH265SBit = 0x80;
+constexpr uint8_t kH265EBit = 0x40;
+
+// Creates Buffer that looks like nal unit of given size.
+rtc::Buffer GenerateNalUnit(size_t size) {
+ RTC_CHECK_GT(size, 0);
+ rtc::Buffer buffer(size);
+ // Set a valid header with type TRAIL_R and nuh_temporal_id_plus1 = 2.
+ buffer[0] = 2;
+ buffer[1] = 2;
+ for (size_t i = 2; i < size; ++i) {
+ buffer[i] = static_cast<uint8_t>(i);
+ }
+ // The last byte shouldn't be 0, or it may be counted as part of the next
+ // 4-byte start sequence.
+ buffer[size - 1] |= 0x10;
+ return buffer;
+}
+
+// Create frame consisting of nalus of given size.
+rtc::Buffer CreateFrame(std::initializer_list<size_t> nalu_sizes) {
+ static constexpr int kStartCodeSize = 3;
+ rtc::Buffer frame(absl::c_accumulate(nalu_sizes, size_t{0}) +
+ kStartCodeSize * nalu_sizes.size());
+ size_t offset = 0;
+ for (size_t nalu_size : nalu_sizes) {
+ EXPECT_GE(nalu_size, 1u);
+ // Insert nalu start code
+ frame[offset] = 0;
+ frame[offset + 1] = 0;
+ frame[offset + 2] = 1;
+ // Set some valid header.
+ frame[offset + 3] = 2;
+ // Fill payload avoiding accidental start codes
+ if (nalu_size > 1) {
+ memset(frame.data() + offset + 4, 0x3f, nalu_size - 1);
+ }
+ offset += (kStartCodeSize + nalu_size);
+ }
+ return frame;
+}
+
+// Create frame consisting of given nalus.
+rtc::Buffer CreateFrame(rtc::ArrayView<const rtc::Buffer> nalus) {
+ static constexpr int kStartCodeSize = 3;
+ int frame_size = 0;
+ for (const rtc::Buffer& nalu : nalus) {
+ frame_size += (kStartCodeSize + nalu.size());
+ }
+ rtc::Buffer frame(frame_size);
+ size_t offset = 0;
+ for (const rtc::Buffer& nalu : nalus) {
+ // Insert nalu start code
+ frame[offset] = 0;
+ frame[offset + 1] = 0;
+ frame[offset + 2] = 1;
+ // Copy the nalu unit.
+ memcpy(frame.data() + offset + 3, nalu.data(), nalu.size());
+ offset += (kStartCodeSize + nalu.size());
+ }
+ return frame;
+}
+
+std::vector<RtpPacketToSend> FetchAllPackets(RtpPacketizerH265* packetizer) {
+ std::vector<RtpPacketToSend> result;
+ size_t num_packets = packetizer->NumPackets();
+ result.reserve(num_packets);
+ RtpPacketToSend packet(kNoExtensions);
+ while (packetizer->NextPacket(&packet)) {
+ result.push_back(packet);
+ }
+ EXPECT_THAT(result, SizeIs(num_packets));
+ return result;
+}
+
+// Single nalu tests.
+TEST(RtpPacketizerH265Test, SingleNalu) {
+ const uint8_t frame[] = {0, 0, 1, H265::kIdrWRadl, 0xFF};
+
+ RtpPacketizerH265 packetizer(frame, kNoLimits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(1));
+ EXPECT_THAT(packets[0].payload(), ElementsAre(H265::kIdrWRadl, 0xFF));
+}
+
+TEST(RtpPacketizerH265Test, SingleNaluTwoPackets) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = kMaxPayloadSize;
+ rtc::Buffer nalus[] = {GenerateNalUnit(kMaxPayloadSize),
+ GenerateNalUnit(100)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(2));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
+}
+
+TEST(RtpPacketizerH265Test,
+ SingleNaluFirstPacketReductionAppliesOnlyToFirstFragment) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 200;
+ limits.first_packet_reduction_len = 5;
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/195),
+ GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/200)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(3));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
+ EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2]));
+}
+
+TEST(RtpPacketizerH265Test,
+ SingleNaluLastPacketReductionAppliesOnlyToLastFragment) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 200;
+ limits.last_packet_reduction_len = 5;
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/200),
+ GenerateNalUnit(/*size=*/195)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(3));
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1]));
+ EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2]));
+}
+
+TEST(RtpPacketizerH265Test,
+ SingleNaluFirstAndLastPacketReductionSumsForSinglePacket) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 200;
+ limits.first_packet_reduction_len = 20;
+ limits.last_packet_reduction_len = 30;
+ rtc::Buffer frame = CreateFrame({150});
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ EXPECT_THAT(packets, SizeIs(1));
+}
+
+// Aggregation tests.
+TEST(RtpPacketizerH265Test, ApRespectsNoPacketReduction) {
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/0x123)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, kNoLimits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(1));
+ auto payload = packets[0].payload();
+ int type = H265::ParseNaluType(payload[0]);
+ EXPECT_EQ(payload.size(),
+ kNalHeaderSize + 3 * kLengthFieldLength + 2 + 2 + 0x123);
+
+ EXPECT_EQ(type, H265::NaluType::kAp);
+ payload = payload.subview(kNalHeaderSize);
+ // 1st fragment.
+ EXPECT_THAT(payload.subview(0, kLengthFieldLength),
+ ElementsAre(0, 2)); // Size.
+ EXPECT_THAT(payload.subview(kLengthFieldLength, 2),
+ ElementsAreArray(nalus[0]));
+ payload = payload.subview(kLengthFieldLength + 2);
+ // 2nd fragment.
+ EXPECT_THAT(payload.subview(0, kLengthFieldLength),
+ ElementsAre(0, 2)); // Size.
+ EXPECT_THAT(payload.subview(kLengthFieldLength, 2),
+ ElementsAreArray(nalus[1]));
+ payload = payload.subview(kLengthFieldLength + 2);
+ // 3rd fragment.
+ EXPECT_THAT(payload.subview(0, kLengthFieldLength),
+ ElementsAre(0x1, 0x23)); // Size.
+ EXPECT_THAT(payload.subview(kLengthFieldLength), ElementsAreArray(nalus[2]));
+}
+
+TEST(RtpPacketizerH265Test, ApRespectsFirstPacketReduction) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1000;
+ limits.first_packet_reduction_len = 100;
+ const size_t kFirstFragmentSize =
+ limits.max_payload_len - limits.first_packet_reduction_len;
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(2));
+ // Expect 1st packet is single nalu.
+ EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0]));
+ // Expect 2nd packet is aggregate of last two fragments.
+ // The H265 nal_unit_header is 2 bytes, according to section 7.3.1.2 of the
+ // H265 spec. The aggregation packet type is 48 and nuh_temporal_id_plus1 is
+ // 2, so the nal_unit_header should be "01100000 00000010", i.e. 96 and 2.
+ EXPECT_THAT(packets[1].payload(),
+ ElementsAre(96, 2, //
+ 0, 2, nalus[1][0], nalus[1][1], //
+ 0, 2, nalus[2][0], nalus[2][1]));
+}
+
+TEST(RtpPacketizerH265Test, ApRespectsLastPacketReduction) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1000;
+ limits.last_packet_reduction_len = 100;
+ const size_t kLastFragmentSize =
+ limits.max_payload_len - limits.last_packet_reduction_len;
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/kLastFragmentSize)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(2));
+ // Expect 1st packet is aggregate of 1st two fragments.
+ EXPECT_THAT(packets[0].payload(),
+ ElementsAre(96, 2, //
+ 0, 2, nalus[0][0], nalus[0][1], //
+ 0, 2, nalus[1][0], nalus[1][1]));
+ // Expect 2nd packet is single nalu.
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2]));
+}
+
+TEST(RtpPacketizerH265Test, TooSmallForApHeaders) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1000;
+ const size_t kLastFragmentSize =
+ limits.max_payload_len - 3 * kLengthFieldLength - 4;
+ rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/2),
+ GenerateNalUnit(/*size=*/kLastFragmentSize)};
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(2));
+ // Expect 1st packet is aggregate of 1st two fragments.
+ EXPECT_THAT(packets[0].payload(),
+ ElementsAre(96, 2, //
+ 0, 2, nalus[0][0], nalus[0][1], //
+ 0, 2, nalus[1][0], nalus[1][1]));
+ // Expect 2nd packet is single nalu.
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2]));
+}
+
+TEST(RtpPacketizerH265Test, LastFragmentFitsInSingleButNotLastPacket) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1178;
+ limits.first_packet_reduction_len = 0;
+ limits.last_packet_reduction_len = 20;
+ limits.single_packet_reduction_len = 20;
+ // Actual sizes, which triggered this bug.
+ rtc::Buffer frame = CreateFrame({20, 8, 18, 1161});
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ // Last packet has to be of correct size.
+ // Incorrect implementation might miss this constraint and not split the last
+ // fragment in two packets.
+ EXPECT_LE(static_cast<int>(packets.back().payload_size()),
+ limits.max_payload_len - limits.last_packet_reduction_len);
+}
+
+// Splits a frame consisting of a single nalu with payload size
+// `frame_payload_size` into FU packets. Returns the sizes of the resulting
+// packet payloads, excluding FU headers.
+std::vector<int> TestFu(size_t frame_payload_size,
+ const RtpPacketizer::PayloadSizeLimits& limits) {
+ rtc::Buffer nalu[] = {GenerateNalUnit(kNalHeaderSize + frame_payload_size)};
+ rtc::Buffer frame = CreateFrame(nalu);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ EXPECT_GE(packets.size(), 2u); // Single packet indicates it is not FU.
+ std::vector<uint16_t> fu_header;
+ std::vector<int> payload_sizes;
+
+ for (const RtpPacketToSend& packet : packets) {
+ auto payload = packet.payload();
+ EXPECT_GT(payload.size(), kFuHeaderSize);
+ // The FU header follows the 2-byte PayloadHdr, according to section 4.4.3
+ // of the spec.
+ fu_header.push_back(payload[2]);
+ payload_sizes.push_back(payload.size() - kFuHeaderSize);
+ }
+
+ EXPECT_TRUE(fu_header.front() & kH265SBit);
+ EXPECT_TRUE(fu_header.back() & kH265EBit);
+ // Clear the S and E bits before checking that every FU header carries the
+ // same original nalu type.
+ fu_header.front() &= ~kH265SBit;
+ fu_header.back() &= ~kH265EBit;
+ uint8_t nalu_type = (nalu[0][0] & kNaluTypeMask) >> 1;
+ EXPECT_THAT(fu_header, Each(Eq(nalu_type)));
+
+ return payload_sizes;
+}
+
+// Fragmentation tests.
+TEST(RtpPacketizerH265Test, FuOddSize) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1200;
+ EXPECT_THAT(TestFu(1200, limits), ElementsAre(600, 600));
+}
+
+TEST(RtpPacketizerH265Test, FuWithFirstPacketReduction) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1200;
+ limits.first_packet_reduction_len = 4;
+ limits.single_packet_reduction_len = 4;
+ EXPECT_THAT(TestFu(1198, limits), ElementsAre(597, 601));
+}
+
+TEST(RtpPacketizerH265Test, FuWithLastPacketReduction) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1200;
+ limits.last_packet_reduction_len = 4;
+ limits.single_packet_reduction_len = 4;
+ EXPECT_THAT(TestFu(1198, limits), ElementsAre(601, 597));
+}
+
+TEST(RtpPacketizerH265Test, FuWithSinglePacketReduction) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1199;
+ limits.single_packet_reduction_len = 200;
+ EXPECT_THAT(TestFu(1000, limits), ElementsAre(500, 500));
+}
+
+TEST(RtpPacketizerH265Test, FuEvenSize) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1200;
+ EXPECT_THAT(TestFu(1201, limits), ElementsAre(600, 601));
+}
+
+TEST(RtpPacketizerH265Test, FuRounding) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1448;
+ EXPECT_THAT(TestFu(10123, limits),
+ ElementsAre(1265, 1265, 1265, 1265, 1265, 1266, 1266, 1266));
+}
+
+TEST(RtpPacketizerH265Test, FuBig) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = 1200;
+ // Generate 10 full sized packets, leave room for FU headers.
+ EXPECT_THAT(
+ TestFu(10 * (1200 - kFuHeaderSize), limits),
+ ElementsAre(1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197));
+}
+
+struct PacketInfo {
+ bool first_fragment = false;
+ bool last_fragment = false;
+ bool aggregated = false;
+ int nalu_index = 0;
+ int nalu_number = 0;
+ int payload_size = 0;
+ int start_offset = 0;
+};
+
+struct MixedApFuTestParams {
+ std::vector<int> nalus;
+ int expect_packetsSize = 0;
+ std::vector<PacketInfo> expected_packets;
+};
+
+class RtpPacketizerH265ParametrizedTest
+ : public ::testing::TestWithParam<MixedApFuTestParams> {};
+
+// Fragmentation + aggregation mixed testing.
+TEST_P(RtpPacketizerH265ParametrizedTest, MixedApFu) {
+ RtpPacketizer::PayloadSizeLimits limits;
+ const MixedApFuTestParams params = GetParam();
+ limits.max_payload_len = 100;
+ std::vector<rtc::Buffer> nalus;
+ nalus.reserve(params.nalus.size());
+
+ // Generate nalus with the sizes specified in the test parameters.
+ for (size_t index = 0; index < params.nalus.size(); index++) {
+ nalus.push_back(GenerateNalUnit(params.nalus[index]));
+ }
+ rtc::Buffer frame = CreateFrame(nalus);
+
+ RtpPacketizerH265 packetizer(frame, limits);
+ std::vector<RtpPacketToSend> packets = FetchAllPackets(&packetizer);
+
+ ASSERT_THAT(packets, SizeIs(params.expect_packetsSize));
+ for (int i = 0; i < params.expect_packetsSize; i++) {
+ PacketInfo expected_packet = params.expected_packets[i];
+ if (expected_packet.aggregated) {
+ int type = H265::ParseNaluType(packets[i].payload()[0]);
+ EXPECT_THAT(type, H265::NaluType::kAp);
+ auto payload = packets[i].payload().subview(kNalHeaderSize);
+ int offset = 0;
+ // Check that the generated AP packet header and payload match the input
+ // nalus.
+ for (int j = expected_packet.nalu_index;
+ j < expected_packet.nalu_index + expected_packet.nalu_number; j++) {
+ EXPECT_THAT(payload.subview(offset, kLengthFieldLength),
+ ElementsAre(0, nalus[j].size()));
+ EXPECT_THAT(
+ payload.subview(offset + kLengthFieldLength, nalus[j].size()),
+ ElementsAreArray(nalus[j]));
+ offset += kLengthFieldLength + nalus[j].size();
+ }
+ } else {
+ uint8_t fu_header = 0;
+ fu_header |= (expected_packet.first_fragment ? kH265SBit : 0);
+ fu_header |= (expected_packet.last_fragment ? kH265EBit : 0);
+ fu_header |= H265::NaluType::kTrailR;
+ EXPECT_THAT(packets[i].payload().subview(0, kFuHeaderSize),
+ ElementsAre(98, 2, fu_header));
+ EXPECT_THAT(
+ packets[i].payload().subview(kFuHeaderSize),
+ ElementsAreArray(nalus[expected_packet.nalu_index].data() +
+ kNalHeaderSize + expected_packet.start_offset,
+ expected_packet.payload_size));
+ }
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ RtpPacketizerH265Test,
+ RtpPacketizerH265ParametrizedTest,
+ testing::Values(
+ // FU + AP + FU.
+ // GenerateNalUnit includes a 2-byte nalu header; that header length is
+ // excluded from the FU packet split calculation.
+ MixedApFuTestParams{.nalus = {140, 20, 20, 160},
+ .expect_packetsSize = 5,
+ .expected_packets = {{.first_fragment = true,
+ .nalu_index = 0,
+ .payload_size = 69,
+ .start_offset = 0},
+ {.last_fragment = true,
+ .nalu_index = 0,
+ .payload_size = 69,
+ .start_offset = 69},
+ {.aggregated = true,
+ .nalu_index = 1,
+ .nalu_number = 2},
+ {.first_fragment = true,
+ .nalu_index = 3,
+ .payload_size = 79,
+ .start_offset = 0},
+ {.last_fragment = true,
+ .nalu_index = 3,
+ .payload_size = 79,
+ .start_offset = 79}}},
+ // AP + FU + AP
+ MixedApFuTestParams{
+ .nalus = {20, 20, 160, 30, 30},
+ .expect_packetsSize = 4,
+ .expected_packets = {
+ {.aggregated = true, .nalu_index = 0, .nalu_number = 2},
+ {.first_fragment = true,
+ .nalu_index = 2,
+ .payload_size = 79,
+ .start_offset = 0},
+ {.last_fragment = true,
+ .nalu_index = 2,
+ .payload_size = 79,
+ .start_offset = 79},
+ {.aggregated = true, .nalu_index = 3, .nalu_number = 2}}}));
+
+} // namespace
+} // namespace webrtc
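
The literal header bytes asserted in the tests above (96, 2 for aggregation packets; 98, 2 plus a one-byte FU header for fragmentation units) follow from the RFC 7798 payload header layout. A minimal sketch of the derivation, assuming nuh_layer_id 0 and that the test constants kH265SBit and kH265EBit are 0x80 and 0x40 (illustration only, not part of the patch):

#include <cstdint>
#include <utility>

// nal_unit_header (H.265 section 7.3.1.2):
// forbidden_zero_bit(1) | nal_unit_type(6) | nuh_layer_id(6) | nuh_temporal_id_plus1(3)
std::pair<uint8_t, uint8_t> MakePayloadHdr(uint8_t type, uint8_t layer_id,
                                           uint8_t tid_plus1) {
  uint8_t first = static_cast<uint8_t>((type << 1) | (layer_id >> 5));
  uint8_t second = static_cast<uint8_t>(((layer_id & 0x1f) << 3) | tid_plus1);
  return {first, second};
}
// MakePayloadHdr(/*type=*/48, 0, 2) == {96, 2}  // Aggregation packet (AP).
// MakePayloadHdr(/*type=*/49, 0, 2) == {98, 2}  // Fragmentation unit (FU).

// FU header byte that follows the FU PayloadHdr: S(1) | E(1) | FuType(6).
uint8_t MakeFuHeader(bool start, bool end, uint8_t original_nalu_type) {
  return static_cast<uint8_t>((start ? 0x80 : 0) | (end ? 0x40 : 0) |
                              original_nalu_type);
}
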
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
index ff482b39b6..31e8b71117 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc
@@ -53,8 +53,8 @@ RTCPSender::Configuration AddRtcpSendEvaluationCallback(
RtpPacketHistory::PaddingMode GetPaddingMode(
const FieldTrialsView* field_trials) {
- if (field_trials &&
- field_trials->IsEnabled("WebRTC-PaddingMode-RecentLargePacket")) {
+ if (!field_trials ||
+ !field_trials->IsDisabled("WebRTC-PaddingMode-RecentLargePacket")) {
return RtpPacketHistory::PaddingMode::kRecentLargePacket;
}
return RtpPacketHistory::PaddingMode::kPriority;
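
The check above is inverted from opt-in to opt-out: kRecentLargePacket is now the default padding mode, and kPriority is used only when the trial is explicitly disabled. A sketch of how a test could restore the old behaviour, assuming the usual WebRTC field-trial string syntax and test/field_trial.h:

// "Disabled" is the value IsDisabled() looks for.
webrtc::test::ScopedFieldTrials trials(
    "WebRTC-PaddingMode-RecentLargePacket/Disabled/");
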
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index b826c30e07..9d2258dc66 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -254,7 +254,8 @@ bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) {
return false;
}
- std::unique_ptr<RtpPacketToSend> packet = rtp_sender_->AllocatePacket();
+ std::unique_ptr<RtpPacketToSend> packet =
+ rtp_sender_->AllocatePacket(frame.csrcs);
packet->SetMarker(MarkerBit(frame.type, frame.payload_id));
packet->SetPayloadType(frame.payload_id);
packet->SetTimestamp(frame.rtp_timestamp);
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index 662f908216..83a2cb211f 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -61,6 +61,9 @@ class RTPSenderAudio {
// header-extension-for-audio-level-indication.
// Valid range is [0,127]. Actual value is negative.
absl::optional<int> audio_level_dbov;
+
+ // Contributing sources list.
+ rtc::ArrayView<const uint32_t> csrcs;
};
bool SendAudio(const RtpAudioFrame& frame);
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
index 0db610c149..724cd3a5e0 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc
@@ -222,4 +222,19 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
EXPECT_FALSE(transport_.last_sent_packet().Marker());
}
+TEST_F(RtpSenderAudioTest, SendsCsrcs) {
+ const char payload_name[] = "audio";
+ const uint8_t payload_type = 127;
+ ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload(
+ payload_name, payload_type, 48000, 0, 1500));
+ uint8_t payload[] = {47, 11, 32, 93, 89};
+
+ std::vector<uint32_t> csrcs({123, 456, 789});
+
+ ASSERT_TRUE(rtp_sender_audio_->SendAudio(
+ {.payload = payload, .payload_id = payload_type, .csrcs = csrcs}));
+
+ EXPECT_EQ(transport_.last_sent_packet().Csrcs(), csrcs);
+}
+
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 9d7c58d19a..ae9eb6b4bd 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -19,10 +19,17 @@
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "rtc_base/checks.h"
#include "rtc_base/event.h"
+#include "rtc_base/logging.h"
namespace webrtc {
namespace {
+// Using a reasonable default of 10ms for the retransmission delay for frames
+// not coming from this sender's encoder. This is usually taken from an
+// estimate of the RTT of the link, so 10ms should be a reasonable estimate for
+// frames being re-transmitted to a peer, probably on the same network.
+const TimeDelta kDefaultRetransmissionsTime = TimeDelta::Millis(10);
+
class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
public:
TransformableVideoSenderFrame(const EncodedImage& encoded_image,
@@ -155,6 +162,17 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame(
const EncodedImage& encoded_image,
RTPVideoHeader video_header,
TimeDelta expected_retransmission_time) {
+ {
+ MutexLock lock(&sender_lock_);
+ if (short_circuit_) {
+ sender_->SendVideo(payload_type, codec_type, rtp_timestamp,
+ encoded_image.CaptureTime(),
+ *encoded_image.GetEncodedData(), encoded_image.size(),
+ video_header, expected_retransmission_time,
+ /*csrcs=*/{});
+ return true;
+ }
+ }
frame_transformer_->Transform(std::make_unique<TransformableVideoSenderFrame>(
encoded_image, video_header, payload_type, codec_type, rtp_timestamp,
expected_retransmission_time, ssrc_,
@@ -177,6 +195,11 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame(
});
}
+void RTPSenderVideoFrameTransformerDelegate::StartShortCircuiting() {
+ MutexLock lock(&sender_lock_);
+ short_circuit_ = true;
+}
+
void RTPSenderVideoFrameTransformerDelegate::SendVideo(
std::unique_ptr<TransformableFrameInterface> transformed_frame) const {
RTC_DCHECK_RUN_ON(transformation_queue_.get());
@@ -200,15 +223,17 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo(
auto* transformed_video_frame =
static_cast<TransformableVideoFrameInterface*>(transformed_frame.get());
VideoFrameMetadata metadata = transformed_video_frame->Metadata();
- sender_->SendVideo(
- transformed_video_frame->GetPayloadType(), metadata.GetCodec(),
- transformed_video_frame->GetTimestamp(),
- /*capture_time=*/Timestamp::MinusInfinity(),
- transformed_video_frame->GetData(),
- transformed_video_frame->GetData().size(),
- RTPVideoHeader::FromMetadata(metadata),
- /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
- metadata.GetCsrcs());
+ // TODO(bugs.webrtc.org/14708): Use an actual RTT estimate for the
+ // retransmission time instead of a const default, in the same way as a
+ // locally encoded frame.
+ sender_->SendVideo(transformed_video_frame->GetPayloadType(),
+ metadata.GetCodec(),
+ transformed_video_frame->GetTimestamp(),
+ /*capture_time=*/Timestamp::MinusInfinity(),
+ transformed_video_frame->GetData(),
+ transformed_video_frame->GetData().size(),
+ RTPVideoHeader::FromMetadata(metadata),
+ kDefaultRetransmissionsTime, metadata.GetCsrcs());
}
}
@@ -253,13 +278,14 @@ std::unique_ptr<TransformableVideoFrameInterface> CloneSenderVideoFrame(
? VideoFrameType::kVideoFrameKey
: VideoFrameType::kVideoFrameDelta;
// TODO(bugs.webrtc.org/14708): Fill in other EncodedImage parameters
-
+ // TODO(bugs.webrtc.org/14708): Use an actual RTT estimate for the
+ // retransmission time instead of a const default, in the same way as a
+ // locally encoded frame.
VideoFrameMetadata metadata = original->Metadata();
RTPVideoHeader new_header = RTPVideoHeader::FromMetadata(metadata);
return std::make_unique<TransformableVideoSenderFrame>(
encoded_image, new_header, original->GetPayloadType(), new_header.codec,
- original->GetTimestamp(),
- /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
+ original->GetTimestamp(), kDefaultRetransmissionsTime,
original->GetSsrc(), metadata.GetCsrcs(), original->GetRid());
}
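
Both TODO(bugs.webrtc.org/14708) comments above point in the same direction. A hypothetical sketch, not part of this patch, of how the constant could eventually give way to a real estimate once an RTT value is plumbed through to the delegate:

// Hypothetical helper; today kDefaultRetransmissionsTime is used unconditionally.
TimeDelta ExpectedRetransmissionTime(absl::optional<TimeDelta> rtt_estimate) {
  return rtt_estimate.value_or(kDefaultRetransmissionsTime);
}
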
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
index 3379ead364..1f70a23ccc 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h
@@ -76,6 +76,8 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
void OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) override;
+ void StartShortCircuiting() override;
+
// Delegates the call to RTPSendVideo::SendVideo on the `encoder_queue_`.
void SendVideo(std::unique_ptr<TransformableFrameInterface> frame) const
RTC_RUN_ON(transformation_queue_);
@@ -109,6 +111,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
// Used when the encoded frames arrives without a current task queue. This can
// happen if a hardware encoder was used.
std::unique_ptr<TaskQueueBase, TaskQueueDeleter> transformation_queue_;
+ bool short_circuit_ RTC_GUARDED_BY(sender_lock_) = false;
};
// Method to support cloning a Sender frame from another frame
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc
index a376be77b4..6790fc3a71 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc
@@ -83,7 +83,7 @@ class RtpSenderVideoFrameTransformerDelegateTest : public ::testing::Test {
delegate->TransformFrame(
/*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2,
encoded_image, RTPVideoHeader::FromMetadata(metadata),
- /*expected_retransmission_time=*/TimeDelta::PlusInfinity());
+ /*expected_retransmission_time=*/TimeDelta::Millis(10));
return frame;
}
@@ -123,7 +123,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
delegate->TransformFrame(
/*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2,
encoded_image, RTPVideoHeader(),
- /*expected_retransmission_time=*/TimeDelta::PlusInfinity());
+ /*expected_retransmission_time=*/TimeDelta::Millis(10));
}
TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
@@ -260,7 +260,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
test_sender_,
SendVideo(payload_type, absl::make_optional(kVideoCodecVP8), timestamp,
/*capture_time=*/Timestamp::MinusInfinity(), buffer, _, _,
- /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
+ /*expected_retransmission_time=*/TimeDelta::Millis(10),
frame_csrcs))
.WillOnce(WithoutArgs([&] {
event.Set();
@@ -289,5 +289,29 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, SettingRTPTimestamp) {
EXPECT_EQ(video_frame.GetTimestamp(), rtp_timestamp);
}
+TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
+ ShortCircuitingSkipsTransform) {
+ auto delegate = rtc::make_ref_counted<RTPSenderVideoFrameTransformerDelegate>(
+ &test_sender_, frame_transformer_,
+ /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get());
+ EXPECT_CALL(*frame_transformer_,
+ RegisterTransformedFrameSinkCallback(_, 1111));
+ delegate->Init();
+
+ delegate->StartShortCircuiting();
+
+ // Will not call the actual transformer.
+ EXPECT_CALL(*frame_transformer_, Transform).Times(0);
+ // Will pass the frame straight through to the sender.
+ EXPECT_CALL(test_sender_, SendVideo);
+
+ EncodedImage encoded_image;
+ encoded_image.SetEncodedData(EncodedImageBuffer::Create(1));
+ delegate->TransformFrame(
+ /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2,
+ encoded_image, RTPVideoHeader(),
+ /*expected_retransmission_time=*/TimeDelta::Millis(10));
+}
+
} // namespace
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
index 94c9249e16..7af945c623 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
@@ -17,6 +17,7 @@
#include "absl/memory/memory.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
#include "rtc_base/thread.h"
namespace webrtc {
@@ -123,9 +124,14 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::Reset() {
void RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame(
std::unique_ptr<RtpFrameObject> frame) {
RTC_DCHECK_RUN_ON(&network_sequence_checker_);
- frame_transformer_->Transform(
- std::make_unique<TransformableVideoReceiverFrame>(std::move(frame), ssrc_,
- receiver_));
+ if (short_circuit_) {
+ // Just pass the frame straight back.
+ receiver_->ManageFrame(std::move(frame));
+ } else {
+ frame_transformer_->Transform(
+ std::make_unique<TransformableVideoReceiverFrame>(std::move(frame),
+ ssrc_, receiver_));
+ }
}
void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame(
@@ -138,6 +144,20 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame(
});
}
+void RtpVideoStreamReceiverFrameTransformerDelegate::StartShortCircuiting() {
+ rtc::scoped_refptr<RtpVideoStreamReceiverFrameTransformerDelegate> delegate(
+ this);
+ network_thread_->PostTask([delegate = std::move(delegate)]() mutable {
+ delegate->StartShortCircuitingOnNetworkSequence();
+ });
+}
+
+void RtpVideoStreamReceiverFrameTransformerDelegate::
+ StartShortCircuitingOnNetworkSequence() {
+ RTC_DCHECK_RUN_ON(&network_sequence_checker_);
+ short_circuit_ = true;
+}
+
void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame(
std::unique_ptr<TransformableFrameInterface> frame) {
RTC_DCHECK_RUN_ON(&network_sequence_checker_);
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h
index 20f9a5caa9..02f2e53923 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h
@@ -54,6 +54,8 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
void OnTransformedFrame(
std::unique_ptr<TransformableFrameInterface> frame) override;
+ void StartShortCircuiting() override;
+
// Delegates the call to RtpVideoFrameReceiver::ManageFrame on the
// `network_thread_`.
void ManageFrame(std::unique_ptr<TransformableFrameInterface> frame);
@@ -62,6 +64,8 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
~RtpVideoStreamReceiverFrameTransformerDelegate() override = default;
private:
+ void StartShortCircuitingOnNetworkSequence();
+
RTC_NO_UNIQUE_ADDRESS SequenceChecker network_sequence_checker_;
RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_);
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer_
@@ -69,6 +73,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate
TaskQueueBase* const network_thread_;
const uint32_t ssrc_;
Clock* const clock_;
+ bool short_circuit_ RTC_GUARDED_BY(network_sequence_checker_) = false;
};
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
index f403c91a74..cf3062610f 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc
@@ -349,5 +349,28 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
}
+TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest,
+ ShortCircuitingSkipsTransform) {
+ rtc::AutoThread main_thread_;
+ TestRtpVideoFrameReceiver receiver;
+ auto mock_frame_transformer =
+ rtc::make_ref_counted<NiceMock<MockFrameTransformer>>();
+ SimulatedClock clock(0);
+ auto delegate =
+ rtc::make_ref_counted<RtpVideoStreamReceiverFrameTransformerDelegate>(
+ &receiver, &clock, mock_frame_transformer, rtc::Thread::Current(),
+ 1111);
+ delegate->Init();
+
+ delegate->StartShortCircuiting();
+ rtc::ThreadManager::ProcessAllMessageQueuesForTesting();
+
+ // Will not call the actual transformer.
+ EXPECT_CALL(*mock_frame_transformer, Transform).Times(0);
+ // Will pass the frame straight to the receiver.
+ EXPECT_CALL(receiver, ManageFrame);
+ delegate->TransformFrame(CreateRtpFrameObject());
+}
+
} // namespace
} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc
index 870f788538..30bbbc5000 100644
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc
@@ -188,8 +188,7 @@ VectorObuInfo ParseObus(
VectorObuInfo obu_infos;
bool expect_continues_obu = false;
for (rtc::ArrayView<const uint8_t> rtp_payload : rtp_payloads) {
- rtc::ByteBufferReader payload(
- reinterpret_cast<const char*>(rtp_payload.data()), rtp_payload.size());
+ rtc::ByteBufferReader payload(rtp_payload);
uint8_t aggregation_header;
if (!payload.ReadUInt8(&aggregation_header)) {
RTC_DLOG(LS_WARNING)
diff --git a/third_party/libwebrtc/modules/third_party/fft/fft_gn/moz.build b/third_party/libwebrtc/modules/third_party/fft/fft_gn/moz.build
index d2e3ea0128..c260743e28 100644
--- a/third_party/libwebrtc/modules/third_party/fft/fft_gn/moz.build
+++ b/third_party/libwebrtc/modules/third_party/fft/fft_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/third_party/g711/g711_3p_gn/moz.build b/third_party/libwebrtc/modules/third_party/g711/g711_3p_gn/moz.build
index aa7a21a680..c2d2597a21 100644
--- a/third_party/libwebrtc/modules/third_party/g711/g711_3p_gn/moz.build
+++ b/third_party/libwebrtc/modules/third_party/g711/g711_3p_gn/moz.build
@@ -184,7 +184,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -194,10 +193,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/third_party/g722/g722_3p_gn/moz.build b/third_party/libwebrtc/modules/third_party/g722/g722_3p_gn/moz.build
index 41a8c05bae..468cc88c65 100644
--- a/third_party/libwebrtc/modules/third_party/g722/g722_3p_gn/moz.build
+++ b/third_party/libwebrtc/modules/third_party/g722/g722_3p_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/utility/utility_gn/moz.build b/third_party/libwebrtc/modules/utility/utility_gn/moz.build
index b6921b7626..6c17ac236e 100644
--- a/third_party/libwebrtc/modules/utility/utility_gn/moz.build
+++ b/third_party/libwebrtc/modules/utility/utility_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
]
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc
index 85b9f20228..106ca1682c 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc
+++ b/third_party/libwebrtc/modules/video_capture/linux/camera_portal.cc
@@ -15,6 +15,7 @@
#include "modules/portal/pipewire_utils.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
+#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
@@ -54,7 +55,9 @@ class CameraPortalPrivate {
GAsyncResult* result,
gpointer user_data);
- CameraPortal::PortalNotifier* notifier_ = nullptr;
+ webrtc::Mutex notifier_lock_;
+ CameraPortal::PortalNotifier* notifier_ RTC_GUARDED_BY(&notifier_lock_) =
+ nullptr;
GDBusConnection* connection_ = nullptr;
GDBusProxy* proxy_ = nullptr;
@@ -66,6 +69,11 @@ CameraPortalPrivate::CameraPortalPrivate(CameraPortal::PortalNotifier* notifier)
: notifier_(notifier) {}
CameraPortalPrivate::~CameraPortalPrivate() {
+ {
+ webrtc::MutexLock lock(&notifier_lock_);
+ notifier_ = nullptr;
+ }
+
if (access_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(connection_,
access_request_signal_id_);
@@ -229,7 +237,11 @@ void CameraPortalPrivate::OnOpenResponse(GDBusProxy* proxy,
}
void CameraPortalPrivate::OnPortalDone(RequestResponse result, int fd) {
- notifier_->OnCameraRequestResult(result, fd);
+ webrtc::MutexLock lock(&notifier_lock_);
+ if (notifier_) {
+ notifier_->OnCameraRequestResult(result, fd);
+ notifier_ = nullptr;
+ }
}
CameraPortal::CameraPortal(PortalNotifier* notifier)
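
The destructor and OnPortalDone changes above implement a common pattern for asynchronous portal callbacks that may fire after the owner has lost interest: the notifier pointer is mutex-guarded, cleared on destruction, and checked (then cleared, since the notification is one-shot) in the callback. A condensed, self-contained sketch of the pattern with hypothetical class names:

#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class Observer {
 public:
  virtual ~Observer() = default;
  virtual void OnResult(int result) = 0;
};

class AsyncSource {
 public:
  explicit AsyncSource(Observer* observer) : observer_(observer) {}
  ~AsyncSource() {
    webrtc::MutexLock lock(&lock_);
    observer_ = nullptr;  // Late callbacks become no-ops.
  }
  // May be invoked on another thread, possibly after the owner dropped interest.
  void OnAsyncResult(int result) {
    webrtc::MutexLock lock(&lock_);
    if (observer_) {
      observer_->OnResult(result);
      observer_ = nullptr;  // One-shot notification.
    }
  }

 private:
  webrtc::Mutex lock_;
  Observer* observer_ RTC_GUARDED_BY(&lock_);
};
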
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc
index ad6cea57b8..31d922035b 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.cc
@@ -20,10 +20,10 @@
#include <vector>
+#include "modules/video_capture/linux/pipewire_session.h"
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_defines.h"
#include "modules/video_capture/video_capture_impl.h"
-#include "modules/video_capture/video_capture_options.h"
#include "rtc_base/logging.h"
namespace webrtc {
@@ -38,6 +38,8 @@ int32_t DeviceInfoPipeWire::Init() {
DeviceInfoPipeWire::~DeviceInfoPipeWire() = default;
uint32_t DeviceInfoPipeWire::NumberOfDevices() {
+ RTC_CHECK(pipewire_session_);
+
return pipewire_session_->nodes().size();
}
@@ -50,6 +52,8 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber,
uint32_t productUniqueIdUTF8Length,
pid_t* pid,
bool* deviceIsPlaceholder) {
+ RTC_CHECK(pipewire_session_);
+
if (deviceNumber >= NumberOfDevices())
return -1;
@@ -85,6 +89,8 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber,
int32_t DeviceInfoPipeWire::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) {
+ RTC_CHECK(pipewire_session_);
+
for (auto& node : pipewire_session_->nodes()) {
if (node.unique_id().compare(deviceUniqueIdUTF8) != 0)
continue;
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h
index 1a1324e92b..00715c94bc 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_pipewire.h
@@ -14,7 +14,7 @@
#include <stdint.h>
#include "modules/video_capture/device_info_impl.h"
-#include "modules/video_capture/linux/pipewire_session.h"
+#include "modules/video_capture/video_capture_options.h"
namespace webrtc {
namespace videocapturemodule {
diff --git a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
index eaeed26b7c..401c38f9c5 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
+++ b/third_party/libwebrtc/modules/video_capture/linux/device_info_v4l2.cc
@@ -57,24 +57,6 @@
#define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) )
#endif
-// These defines are here to support building on kernel 3.16 which some
-// downstream projects, e.g. Firefox, use.
-// TODO(apehrson): Remove them and their undefs when no longer needed.
-#ifndef V4L2_PIX_FMT_ABGR32
-#define ABGR32_OVERRIDE 1
-#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4')
-#endif
-
-#ifndef V4L2_PIX_FMT_ARGB32
-#define ARGB32_OVERRIDE 1
-#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4')
-#endif
-
-#ifndef V4L2_PIX_FMT_RGBA32
-#define RGBA32_OVERRIDE 1
-#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4')
-#endif
-
namespace webrtc {
namespace videocapturemodule {
#ifdef WEBRTC_LINUX
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc
index 8af483636a..319824d3c5 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_pipewire.cc
@@ -178,8 +178,7 @@ int32_t VideoCaptureModulePipeWire::StartCapture(
int res = pw_stream_connect(
stream_, PW_DIRECTION_INPUT, node_id_,
static_cast<enum pw_stream_flags>(PW_STREAM_FLAG_AUTOCONNECT |
- PW_STREAM_FLAG_DONT_RECONNECT |
- PW_STREAM_FLAG_MAP_BUFFERS),
+ PW_STREAM_FLAG_DONT_RECONNECT),
params.data(), params.size());
if (res != 0) {
RTC_LOG(LS_ERROR) << "Could not connect to camera stream: "
@@ -312,11 +311,11 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) {
0);
}
+ const int buffer_types =
+ (1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr);
spa_pod_builder_add(
&builder, SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32),
- SPA_PARAM_BUFFERS_dataType,
- SPA_POD_CHOICE_FLAGS_Int((1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr)),
- 0);
+ SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(buffer_types), 0);
params.push_back(
static_cast<spa_pod*>(spa_pod_builder_pop(&builder, &frame)));
@@ -384,14 +383,15 @@ void VideoCaptureModulePipeWire::ProcessBuffers() {
RTC_CHECK_RUNS_SERIALIZED(&capture_checker_);
while (pw_buffer* buffer = pw_stream_dequeue_buffer(stream_)) {
+ spa_buffer* spaBuffer = buffer->buffer;
struct spa_meta_header* h;
h = static_cast<struct spa_meta_header*>(
- spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h)));
+ spa_buffer_find_meta_data(spaBuffer, SPA_META_Header, sizeof(*h)));
struct spa_meta_videotransform* videotransform;
videotransform =
static_cast<struct spa_meta_videotransform*>(spa_buffer_find_meta_data(
- buffer->buffer, SPA_META_VideoTransform, sizeof(*videotransform)));
+ spaBuffer, SPA_META_VideoTransform, sizeof(*videotransform)));
if (videotransform) {
VideoRotation rotation =
VideorotationFromPipeWireTransform(videotransform->transform);
@@ -401,11 +401,35 @@ void VideoCaptureModulePipeWire::ProcessBuffers() {
if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) {
RTC_LOG(LS_INFO) << "Dropping corruped frame.";
- } else {
- IncomingFrame(static_cast<unsigned char*>(buffer->buffer->datas[0].data),
- buffer->buffer->datas[0].chunk->size,
- configured_capability_);
+ pw_stream_queue_buffer(stream_, buffer);
+ continue;
+ }
+
+ if (spaBuffer->datas[0].type == SPA_DATA_DmaBuf ||
+ spaBuffer->datas[0].type == SPA_DATA_MemFd) {
+ ScopedBuf frame;
+ frame.initialize(
+ static_cast<uint8_t*>(
+ mmap(nullptr,
+ spaBuffer->datas[0].maxsize + spaBuffer->datas[0].mapoffset,
+ PROT_READ, MAP_PRIVATE, spaBuffer->datas[0].fd, 0)),
+ spaBuffer->datas[0].maxsize + spaBuffer->datas[0].mapoffset,
+ spaBuffer->datas[0].fd, spaBuffer->datas[0].type == SPA_DATA_DmaBuf);
+
+ if (!frame) {
+ RTC_LOG(LS_ERROR) << "Failed to mmap the memory: "
+ << std::strerror(errno);
+ return;
+ }
+
+ IncomingFrame(
+ SPA_MEMBER(frame.get(), spaBuffer->datas[0].mapoffset, uint8_t),
+ spaBuffer->datas[0].chunk->size, configured_capability_);
+ } else { // SPA_DATA_MemPtr
+ IncomingFrame(static_cast<uint8_t*>(spaBuffer->datas[0].data),
+ spaBuffer->datas[0].chunk->size, configured_capability_);
}
+
pw_stream_queue_buffer(stream_, buffer);
}
}
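
With PW_STREAM_FLAG_MAP_BUFFERS dropped above, DmaBuf and MemFd planes are no longer mapped by PipeWire, so the capturer maps them itself before handing the data to IncomingFrame(). A reduced sketch of that handling, assuming <sys/mman.h> and the SPA buffer headers, with the munmap that ScopedBuf performs in the patch written out explicitly:

#include <sys/mman.h>
#include <cstdint>
#include <spa/buffer/buffer.h>

void HandlePlane(const spa_data& d,
                 void (*deliver)(const uint8_t* data, size_t size)) {
  if (d.type == SPA_DATA_DmaBuf || d.type == SPA_DATA_MemFd) {
    const size_t map_size = d.maxsize + d.mapoffset;
    void* map = mmap(nullptr, map_size, PROT_READ, MAP_PRIVATE, d.fd, 0);
    if (map == MAP_FAILED)
      return;  // The real code logs strerror(errno) and bails out.
    // Frame data starts at the plane's mapoffset inside the mapping.
    deliver(static_cast<const uint8_t*>(map) + d.mapoffset, d.chunk->size);
    munmap(map, map_size);  // ScopedBuf owns this in the patch.
  } else {  // SPA_DATA_MemPtr: memory is already CPU-visible.
    deliver(static_cast<const uint8_t*>(d.data), d.chunk->size);
  }
}
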
diff --git a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
index c887683dc8..08d23f7f58 100644
--- a/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
+++ b/third_party/libwebrtc/modules/video_capture/linux/video_capture_v4l2.cc
@@ -294,7 +294,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture(
if (_captureThread.empty()) {
quit_ = false;
_captureThread = rtc::PlatformThread::SpawnJoinable(
- [self = rtc::scoped_refptr(this)] {
+ [self = scoped_refptr(this)] {
while (self->CaptureProcess()) {
}
},
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture.h b/third_party/libwebrtc/modules/video_capture/video_capture.h
index 378a53b4d2..f59c34f8b2 100644
--- a/third_party/libwebrtc/modules/video_capture/video_capture.h
+++ b/third_party/libwebrtc/modules/video_capture/video_capture.h
@@ -34,7 +34,7 @@ protected:
virtual ~VideoInputFeedBack(){}
};
-class VideoCaptureModule : public rtc::RefCountInterface {
+class VideoCaptureModule : public RefCountInterface {
public:
// Interface for receiving information about available camera devices.
class DeviceInfo {
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build
index 24988a1ffc..f58aa8e782 100644
--- a/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_internal_impl_gn/moz.build
@@ -267,7 +267,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -277,10 +276,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["WEBRTC_USE_PIPEWIRE"] = True
diff --git a/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build
index 49c62d5cf6..820d5655df 100644
--- a/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_capture/video_capture_module_gn/moz.build
@@ -204,7 +204,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -214,10 +213,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/BUILD.gn b/third_party/libwebrtc/modules/video_coding/BUILD.gn
index cca9d8883a..d9e614ff81 100644
--- a/third_party/libwebrtc/modules/video_coding/BUILD.gn
+++ b/third_party/libwebrtc/modules/video_coding/BUILD.gn
@@ -757,7 +757,6 @@ if (rtc_include_tests) {
"../../api/video_codecs:video_codecs_api",
"../../modules/utility:utility",
"../../rtc_base:checks",
- "../../rtc_base:ignore_wundef",
"../../sdk/android:internal_jni",
"../../sdk/android:native_api_base",
"../../sdk/android:native_api_codecs",
@@ -852,8 +851,6 @@ if (rtc_include_tests) {
"../../api:frame_generator_api",
"../../api:scoped_refptr",
"../../api:sequence_checker",
- "../../api:video_codec_stats_api",
- "../../api:video_codec_tester_api",
"../../api:videocodec_test_fixture_api",
"../../api/numerics:numerics",
"../../api/task_queue",
@@ -995,46 +992,6 @@ if (rtc_include_tests) {
]
}
- rtc_library("video_codec_tester") {
- testonly = true
- sources = [
- "codecs/test/video_codec_analyzer.cc",
- "codecs/test/video_codec_analyzer.h",
- "codecs/test/video_codec_stats_impl.cc",
- "codecs/test/video_codec_stats_impl.h",
- "codecs/test/video_codec_tester_impl.cc",
- "codecs/test/video_codec_tester_impl.h",
- ]
-
- deps = [
- ":video_coding_utility",
- "../../api:sequence_checker",
- "../../api:video_codec_stats_api",
- "../../api:video_codec_tester_api",
- "../../api/numerics:numerics",
- "../../api/task_queue:default_task_queue_factory",
- "../../api/test/metrics:metrics_logger",
- "../../api/units:data_rate",
- "../../api/units:frequency",
- "../../api/units:time_delta",
- "../../api/units:timestamp",
- "../../api/video:encoded_image",
- "../../api/video:resolution",
- "../../api/video:video_codec_constants",
- "../../api/video:video_frame",
- "../../rtc_base:checks",
- "../../rtc_base:rtc_event",
- "../../rtc_base:task_queue_for_test",
- "../../rtc_base:timeutils",
- "../../rtc_base/system:no_unique_address",
- "../../system_wrappers",
- "../../test:video_test_support",
- "//third_party/libyuv",
- ]
-
- absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
- }
-
rtc_test("video_codec_perf_tests") {
testonly = true
@@ -1042,28 +999,20 @@ if (rtc_include_tests) {
deps = [
":video_codec_interface",
- ":video_codec_tester",
- "../../api:create_video_codec_tester_api",
- "../../api:video_codec_tester_api",
- "../../api:videocodec_test_stats_api",
"../../api/test/metrics:global_metrics_logger_and_exporter",
"../../api/units:data_rate",
"../../api/units:frequency",
- "../../api/video:encoded_image",
"../../api/video:resolution",
- "../../api/video:video_frame",
- "../../api/video_codecs:scalability_mode",
- "../../api/video_codecs:video_codecs_api",
- "../../media:rtc_internal_video_codecs",
+ "../../api/video_codecs:builtin_video_decoder_factory",
+ "../../api/video_codecs:builtin_video_encoder_factory",
+ "../../modules/video_coding/svc:scalability_mode_util",
"../../rtc_base:logging",
+ "../../rtc_base:stringutils",
"../../test:fileutils",
"../../test:test_flags",
"../../test:test_main",
"../../test:test_support",
- "../../test:video_test_support",
- "../rtp_rtcp:rtp_rtcp_format",
- "svc:scalability_mode_util",
- "//third_party/libyuv",
+ "../../test:video_codec_tester",
]
if (is_android) {
@@ -1191,9 +1140,6 @@ if (rtc_include_tests) {
sources = [
"chain_diff_calculator_unittest.cc",
- "codecs/test/video_codec_analyzer_unittest.cc",
- "codecs/test/video_codec_stats_impl_unittest.cc",
- "codecs/test/video_codec_tester_impl_unittest.cc",
"codecs/test/videocodec_test_fixture_config_unittest.cc",
"codecs/test/videocodec_test_stats_impl_unittest.cc",
"codecs/test/videoprocessor_unittest.cc",
@@ -1248,7 +1194,6 @@ if (rtc_include_tests) {
":packet_buffer",
":simulcast_test_fixture_impl",
":video_codec_interface",
- ":video_codec_tester",
":video_codecs_test_framework",
":video_coding",
":video_coding_legacy",
@@ -1271,7 +1216,6 @@ if (rtc_include_tests) {
"../../api:rtp_packet_info",
"../../api:scoped_refptr",
"../../api:simulcast_test_fixture_api",
- "../../api:video_codec_tester_api",
"../../api:videocodec_test_fixture_api",
"../../api/task_queue",
"../../api/task_queue:default_task_queue_factory",
@@ -1297,6 +1241,7 @@ if (rtc_include_tests) {
"../../common_video/generic_frame_descriptor",
"../../common_video/test:utilities",
"../../media:media_constants",
+ "../../media:rtc_internal_video_codecs",
"../../media:rtc_media_base",
"../../rtc_base:checks",
"../../rtc_base:gunit_helpers",
diff --git a/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build
index dd8e979e41..144097f87a 100644
--- a/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/codec_globals_headers_gn/moz.build b/third_party/libwebrtc/modules/video_coding/codec_globals_headers_gn/moz.build
index 73fce5bf02..cf74ae964c 100644
--- a/third_party/libwebrtc/modules/video_coding/codec_globals_headers_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/codec_globals_headers_gn/moz.build
@@ -180,16 +180,9 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
-if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
-
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
index e67bb6616d..bfe37b935d 100644
--- a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc
deleted file mode 100644
index 772c15734a..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-
-#include <memory>
-
-#include "api/task_queue/default_task_queue_factory.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_codec_constants.h"
-#include "api/video/video_frame.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/event.h"
-#include "rtc_base/time_utils.h"
-#include "third_party/libyuv/include/libyuv/compare.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using Psnr = VideoCodecStats::Frame::Psnr;
-
-Psnr CalcPsnr(const I420BufferInterface& ref_buffer,
- const I420BufferInterface& dec_buffer) {
- RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width());
- RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height());
-
- uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(),
- ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height());
-
- uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(),
- ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2);
-
- uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(),
- ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2);
-
- int num_y_samples = dec_buffer.width() * dec_buffer.height();
- Psnr psnr;
- psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples);
- psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4);
- psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4);
-
- return psnr;
-}
-
-} // namespace
-
-VideoCodecAnalyzer::VideoCodecAnalyzer(
- ReferenceVideoSource* reference_video_source)
- : reference_video_source_(reference_video_source), num_frames_(0) {
- sequence_checker_.Detach();
-}
-
-void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) {
- int64_t encode_start_us = rtc::TimeMicros();
- task_queue_.PostTask(
- [this, timestamp_rtp = input_frame.timestamp(), encode_start_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- RTC_CHECK(frame_num_.find(timestamp_rtp) == frame_num_.end());
- frame_num_[timestamp_rtp] = num_frames_++;
-
- stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp],
- .timestamp_rtp = timestamp_rtp,
- .encode_start = Timestamp::Micros(encode_start_us)});
- });
-}
-
-void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) {
- int64_t encode_finished_us = rtc::TimeMicros();
-
- task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
- spatial_idx = frame.SpatialIndex().value_or(0),
- temporal_idx = frame.TemporalIndex().value_or(0),
- width = frame._encodedWidth,
- height = frame._encodedHeight,
- frame_type = frame._frameType,
- frame_size_bytes = frame.size(), qp = frame.qp_,
- encode_finished_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- if (spatial_idx > 0) {
- VideoCodecStats::Frame* base_frame =
- stats_.GetFrame(timestamp_rtp, /*spatial_idx=*/0);
-
- stats_.AddFrame({.frame_num = base_frame->frame_num,
- .timestamp_rtp = timestamp_rtp,
- .spatial_idx = spatial_idx,
- .encode_start = base_frame->encode_start});
- }
-
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->spatial_idx = spatial_idx;
- fs->temporal_idx = temporal_idx;
- fs->width = width;
- fs->height = height;
- fs->frame_size = DataSize::Bytes(frame_size_bytes);
- fs->qp = qp;
- fs->keyframe = frame_type == VideoFrameType::kVideoFrameKey;
- fs->encode_time = Timestamp::Micros(encode_finished_us) - fs->encode_start;
- fs->encoded = true;
- });
-}
-
-void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) {
- int64_t decode_start_us = rtc::TimeMicros();
- task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
- spatial_idx = frame.SpatialIndex().value_or(0),
- frame_size_bytes = frame.size(), decode_start_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- if (fs == nullptr) {
- if (frame_num_.find(timestamp_rtp) == frame_num_.end()) {
- frame_num_[timestamp_rtp] = num_frames_++;
- }
- stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp],
- .timestamp_rtp = timestamp_rtp,
- .spatial_idx = spatial_idx,
- .frame_size = DataSize::Bytes(frame_size_bytes)});
- fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- }
-
- fs->decode_start = Timestamp::Micros(decode_start_us);
- });
-}
-
-void VideoCodecAnalyzer::FinishDecode(const VideoFrame& frame,
- int spatial_idx) {
- int64_t decode_finished_us = rtc::TimeMicros();
- task_queue_.PostTask([this, timestamp_rtp = frame.timestamp(), spatial_idx,
- width = frame.width(), height = frame.height(),
- decode_finished_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->decode_time = Timestamp::Micros(decode_finished_us) - fs->decode_start;
-
- if (!fs->encoded) {
- fs->width = width;
- fs->height = height;
- }
-
- fs->decoded = true;
- });
-
- if (reference_video_source_ != nullptr) {
-    // Copy the hardware-backed frame into main memory to release output
-    // buffers, whose number may be limited in hardware decoders.
- rtc::scoped_refptr<I420BufferInterface> decoded_buffer =
- frame.video_frame_buffer()->ToI420();
-
- task_queue_.PostTask([this, decoded_buffer,
- timestamp_rtp = frame.timestamp(), spatial_idx]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- VideoFrame ref_frame = reference_video_source_->GetFrame(
- timestamp_rtp, {.width = decoded_buffer->width(),
- .height = decoded_buffer->height()});
- rtc::scoped_refptr<I420BufferInterface> ref_buffer =
- ref_frame.video_frame_buffer()->ToI420();
-
- Psnr psnr = CalcPsnr(*decoded_buffer, *ref_buffer);
-
- VideoCodecStats::Frame* fs =
- this->stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->psnr = psnr;
- });
- }
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecAnalyzer::GetStats() {
- std::unique_ptr<VideoCodecStats> stats;
- rtc::Event ready;
- task_queue_.PostTask([this, &stats, &ready]() mutable {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- stats.reset(new VideoCodecStatsImpl(stats_));
- ready.Set();
- });
- ready.Wait(rtc::Event::kForever);
- return stats;
-}
-
-} // namespace test
-} // namespace webrtc
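For reference, a minimal sketch of the SSE-to-PSNR conversion that the removed CalcPsnr helper delegated to libyuv::SumSquareErrorToPsnr, assuming 8-bit samples (peak value 255); it follows the standard formula and does not reproduce libyuv's exact clamping of the identical-planes case.

#include <cmath>
#include <cstdint>
#include <limits>

// PSNR for 8-bit samples: 10 * log10(255^2 * n / sse). Chroma planes pass
// n / 4 samples because of 4:2:0 subsampling, as in the code above.
double SseToPsnr(uint64_t sse, uint64_t num_samples) {
  if (sse == 0) {
    return std::numeric_limits<double>::infinity();  // Planes are identical.
  }
  return 10.0 * std::log10(255.0 * 255.0 * static_cast<double>(num_samples) /
                           static_cast<double>(sse));
}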
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h
deleted file mode 100644
index 29ca8ee2ff..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
-
-#include <map>
-#include <memory>
-
-#include "absl/types/optional.h"
-#include "api/sequence_checker.h"
-#include "api/test/video_codec_tester.h"
-#include "api/video/encoded_image.h"
-#include "api/video/resolution.h"
-#include "api/video/video_frame.h"
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-#include "rtc_base/system/no_unique_address.h"
-#include "rtc_base/task_queue_for_test.h"
-
-namespace webrtc {
-namespace test {
-
-// Analyzer measures and collects metrics necessary for evaluation of video
-// codec quality and performance. This class is thread-safe.
-class VideoCodecAnalyzer {
- public:
- // An interface that provides reference frames for spatial quality analysis.
- class ReferenceVideoSource {
- public:
- virtual ~ReferenceVideoSource() = default;
-
- virtual VideoFrame GetFrame(uint32_t timestamp_rtp,
- Resolution resolution) = 0;
- };
-
- explicit VideoCodecAnalyzer(
- ReferenceVideoSource* reference_video_source = nullptr);
-
- void StartEncode(const VideoFrame& frame);
-
- void FinishEncode(const EncodedImage& frame);
-
- void StartDecode(const EncodedImage& frame);
-
- void FinishDecode(const VideoFrame& frame, int spatial_idx);
-
- std::unique_ptr<VideoCodecStats> GetStats();
-
- protected:
- TaskQueueForTest task_queue_;
-
- ReferenceVideoSource* const reference_video_source_;
-
- VideoCodecStatsImpl stats_ RTC_GUARDED_BY(sequence_checker_);
-
- // Map from RTP timestamp to frame number.
- std::map<uint32_t, int> frame_num_ RTC_GUARDED_BY(sequence_checker_);
-
- // Processed frames counter.
- int num_frames_ RTC_GUARDED_BY(sequence_checker_);
-
- RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
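A minimal usage sketch of the removed VideoCodecAnalyzer interface declared above, based on the unit tests that follow; it assumes `input_frame`, `encoded_image` and `decoded_frame` come from a real encode/decode round trip and that only the base spatial layer is analyzed.

#include <memory>

#include "api/test/video_codec_stats.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
#include "modules/video_coding/codecs/test/video_codec_analyzer.h"

std::unique_ptr<webrtc::test::VideoCodecStats> CollectStats(
    const webrtc::VideoFrame& input_frame,
    const webrtc::EncodedImage& encoded_image,
    const webrtc::VideoFrame& decoded_frame) {
  webrtc::test::VideoCodecAnalyzer analyzer;
  analyzer.StartEncode(input_frame);      // Records the encode start time.
  analyzer.FinishEncode(encoded_image);   // Records size, QP and encode time.
  analyzer.StartDecode(encoded_image);    // Records the decode start time.
  analyzer.FinishDecode(decoded_frame, /*spatial_idx=*/0);
  return analyzer.GetStats();             // Blocks until queued tasks finish.
}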
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
deleted file mode 100644
index 03146417da..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-
-#include "absl/types/optional.h"
-#include "api/video/i420_buffer.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-#include "third_party/libyuv/include/libyuv/planar_functions.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::Return;
-using ::testing::Values;
-using Psnr = VideoCodecStats::Frame::Psnr;
-
-const uint32_t kTimestamp = 3000;
-const int kSpatialIdx = 2;
-
-class MockReferenceVideoSource
- : public VideoCodecAnalyzer::ReferenceVideoSource {
- public:
- MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override));
-};
-
-VideoFrame CreateVideoFrame(uint32_t timestamp_rtp,
- uint8_t y = 0,
- uint8_t u = 0,
- uint8_t v = 0) {
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(2, 2));
-
- libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
- buffer->MutableDataU(), buffer->StrideU(),
- buffer->MutableDataV(), buffer->StrideV(), 0, 0,
- buffer->width(), buffer->height(), y, u, v);
-
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
-}
-
-EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) {
- EncodedImage encoded_image;
- encoded_image.SetRtpTimestamp(timestamp_rtp);
- encoded_image.SetSpatialIndex(spatial_idx);
- return encoded_image;
-}
-} // namespace
-
-TEST(VideoCodecAnalyzerTest, StartEncode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartEncode(CreateVideoFrame(kTimestamp));
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(fs[0].timestamp_rtp, kTimestamp);
-}
-
-TEST(VideoCodecAnalyzerTest, FinishEncode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartEncode(CreateVideoFrame(kTimestamp));
-
- EncodedImage encoded_frame = CreateEncodedImage(kTimestamp, kSpatialIdx);
- analyzer.FinishEncode(encoded_frame);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(2u, fs.size());
- EXPECT_EQ(kSpatialIdx, fs[1].spatial_idx);
-}
-
-TEST(VideoCodecAnalyzerTest, StartDecode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(kTimestamp, fs[0].timestamp_rtp);
-}
-
-TEST(VideoCodecAnalyzerTest, FinishDecode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
- VideoFrame decoded_frame = CreateVideoFrame(kTimestamp);
- analyzer.FinishDecode(decoded_frame, kSpatialIdx);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(decoded_frame.width(), fs[0].width);
- EXPECT_EQ(decoded_frame.height(), fs[0].height);
-}
-
-TEST(VideoCodecAnalyzerTest, ReferenceVideoSource) {
- MockReferenceVideoSource reference_video_source;
- VideoCodecAnalyzer analyzer(&reference_video_source);
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
-
- EXPECT_CALL(reference_video_source, GetFrame)
- .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0,
- /*u=*/0, /*v=*/0)));
-
- analyzer.FinishDecode(
- CreateVideoFrame(kTimestamp, /*value_y=*/1, /*value_u=*/2, /*value_v=*/3),
- kSpatialIdx);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_TRUE(fs[0].psnr.has_value());
-
- const Psnr& psnr = *fs[0].psnr;
- EXPECT_NEAR(psnr.y, 48, 1);
- EXPECT_NEAR(psnr.u, 42, 1);
- EXPECT_NEAR(psnr.v, 38, 1);
-}
-
-} // namespace test
-} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc
deleted file mode 100644
index 9808e2a601..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-
-#include <algorithm>
-
-#include "api/numerics/samples_stats_counter.h"
-#include "api/test/metrics/metrics_logger.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/time_utils.h"
-
-namespace webrtc {
-namespace test {
-namespace {
-using Frame = VideoCodecStats::Frame;
-using Stream = VideoCodecStats::Stream;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-class LeakyBucket {
- public:
- LeakyBucket() : level_bits_(0) {}
-
-  // Updates the bucket level and returns the current level in bits. Data is
-  // removed from the bucket at a rate equal to the target bitrate of the
-  // previous frame. The bucket level is tracked with floating-point
-  // precision; the returned value is rounded up.
- int Update(const Frame& frame) {
- RTC_CHECK(frame.target_bitrate) << "Bitrate must be specified.";
-
- if (prev_frame_) {
- RTC_CHECK_GT(frame.timestamp_rtp, prev_frame_->timestamp_rtp)
- << "Timestamp must increase.";
- TimeDelta passed =
- (frame.timestamp_rtp - prev_frame_->timestamp_rtp) / k90kHz;
- level_bits_ -=
- prev_frame_->target_bitrate->bps() * passed.us() / 1000000.0;
- level_bits_ = std::max(level_bits_, 0.0);
- }
-
- prev_frame_ = frame;
-
- level_bits_ += frame.frame_size.bytes() * 8;
- return static_cast<int>(std::ceil(level_bits_));
- }
-
- private:
- absl::optional<Frame> prev_frame_;
- double level_bits_;
-};
-
-// Merges spatial layer frames into superframes.
-std::vector<Frame> Merge(const std::vector<Frame>& frames) {
- std::vector<Frame> superframes;
- // Map from frame timestamp to index in `superframes` vector.
- std::map<uint32_t, int> index;
-
- for (const auto& f : frames) {
- if (index.find(f.timestamp_rtp) == index.end()) {
- index[f.timestamp_rtp] = static_cast<int>(superframes.size());
- superframes.push_back(f);
- continue;
- }
-
- Frame& sf = superframes[index[f.timestamp_rtp]];
-
- sf.width = std::max(sf.width, f.width);
- sf.height = std::max(sf.height, f.height);
- sf.frame_size += f.frame_size;
- sf.keyframe |= f.keyframe;
-
- sf.encode_time = std::max(sf.encode_time, f.encode_time);
- sf.decode_time = std::max(sf.decode_time, f.decode_time);
-
- if (f.spatial_idx > sf.spatial_idx) {
- if (f.qp) {
- sf.qp = f.qp;
- }
- if (f.psnr) {
- sf.psnr = f.psnr;
- }
- }
-
- sf.spatial_idx = std::max(sf.spatial_idx, f.spatial_idx);
- sf.temporal_idx = std::max(sf.temporal_idx, f.temporal_idx);
-
- sf.encoded |= f.encoded;
- sf.decoded |= f.decoded;
- }
-
- return superframes;
-}
-
-Timestamp RtpToTime(uint32_t timestamp_rtp) {
- return Timestamp::Micros((timestamp_rtp / k90kHz).us());
-}
-
-SamplesStatsCounter::StatsSample StatsSample(double value, Timestamp time) {
- return SamplesStatsCounter::StatsSample{value, time};
-}
-
-TimeDelta CalcTotalDuration(const std::vector<Frame>& frames) {
- RTC_CHECK(!frames.empty());
- TimeDelta duration = TimeDelta::Zero();
- if (frames.size() > 1) {
- duration +=
- (frames.rbegin()->timestamp_rtp - frames.begin()->timestamp_rtp) /
- k90kHz;
- }
-
- // Add last frame duration. If target frame rate is provided, calculate frame
- // duration from it. Otherwise, assume duration of last frame is the same as
- // duration of preceding frame.
- if (frames.rbegin()->target_framerate) {
- duration += 1 / *frames.rbegin()->target_framerate;
- } else {
- RTC_CHECK_GT(frames.size(), 1u);
- duration += (frames.rbegin()->timestamp_rtp -
- std::next(frames.rbegin())->timestamp_rtp) /
- k90kHz;
- }
-
- return duration;
-}
-} // namespace
-
-std::vector<Frame> VideoCodecStatsImpl::Slice(
- absl::optional<Filter> filter) const {
- std::vector<Frame> frames;
- for (const auto& [frame_id, f] : frames_) {
- if (filter.has_value()) {
- if (filter->first_frame.has_value() &&
- f.frame_num < *filter->first_frame) {
- continue;
- }
- if (filter->last_frame.has_value() && f.frame_num > *filter->last_frame) {
- continue;
- }
- if (filter->spatial_idx.has_value() &&
- f.spatial_idx != *filter->spatial_idx) {
- continue;
- }
- if (filter->temporal_idx.has_value() &&
- f.temporal_idx > *filter->temporal_idx) {
- continue;
- }
- }
- frames.push_back(f);
- }
- return frames;
-}
-
-Stream VideoCodecStatsImpl::Aggregate(const std::vector<Frame>& frames) const {
- std::vector<Frame> superframes = Merge(frames);
- RTC_CHECK(!superframes.empty());
-
-  LeakyBucket leaky_bucket;
- Stream stream;
- for (size_t i = 0; i < superframes.size(); ++i) {
- Frame& f = superframes[i];
- Timestamp time = RtpToTime(f.timestamp_rtp);
-
- if (!f.frame_size.IsZero()) {
- stream.width.AddSample(StatsSample(f.width, time));
- stream.height.AddSample(StatsSample(f.height, time));
- stream.frame_size_bytes.AddSample(
- StatsSample(f.frame_size.bytes(), time));
- stream.keyframe.AddSample(StatsSample(f.keyframe, time));
- if (f.qp) {
- stream.qp.AddSample(StatsSample(*f.qp, time));
- }
- }
-
- if (f.encoded) {
- stream.encode_time_ms.AddSample(StatsSample(f.encode_time.ms(), time));
- }
-
- if (f.decoded) {
- stream.decode_time_ms.AddSample(StatsSample(f.decode_time.ms(), time));
- }
-
- if (f.psnr) {
- stream.psnr.y.AddSample(StatsSample(f.psnr->y, time));
- stream.psnr.u.AddSample(StatsSample(f.psnr->u, time));
- stream.psnr.v.AddSample(StatsSample(f.psnr->v, time));
- }
-
- if (f.target_framerate) {
- stream.target_framerate_fps.AddSample(
- StatsSample(f.target_framerate->millihertz() / 1000.0, time));
- }
-
- if (f.target_bitrate) {
- stream.target_bitrate_kbps.AddSample(
- StatsSample(f.target_bitrate->bps() / 1000.0, time));
-
-      int buffer_level_bits = leaky_bucket.Update(f);
- stream.transmission_time_ms.AddSample(
- StatsSample(buffer_level_bits * rtc::kNumMillisecsPerSec /
- f.target_bitrate->bps(),
- RtpToTime(f.timestamp_rtp)));
- }
- }
-
- TimeDelta duration = CalcTotalDuration(superframes);
- DataRate encoded_bitrate =
- DataSize::Bytes(stream.frame_size_bytes.GetSum()) / duration;
-
- int num_encoded_frames = stream.frame_size_bytes.NumSamples();
- Frequency encoded_framerate = num_encoded_frames / duration;
-
- absl::optional<double> bitrate_mismatch_pct;
- if (auto target_bitrate = superframes.begin()->target_bitrate;
- target_bitrate) {
- bitrate_mismatch_pct = 100.0 *
- (encoded_bitrate.bps() - target_bitrate->bps()) /
- target_bitrate->bps();
- }
-
- absl::optional<double> framerate_mismatch_pct;
- if (auto target_framerate = superframes.begin()->target_framerate;
- target_framerate) {
- framerate_mismatch_pct =
- 100.0 *
- (encoded_framerate.millihertz() - target_framerate->millihertz()) /
- target_framerate->millihertz();
- }
-
- for (auto& f : superframes) {
- Timestamp time = RtpToTime(f.timestamp_rtp);
- stream.encoded_bitrate_kbps.AddSample(
- StatsSample(encoded_bitrate.bps() / 1000.0, time));
-
- stream.encoded_framerate_fps.AddSample(
- StatsSample(encoded_framerate.millihertz() / 1000.0, time));
-
- if (bitrate_mismatch_pct) {
- stream.bitrate_mismatch_pct.AddSample(
- StatsSample(*bitrate_mismatch_pct, time));
- }
-
- if (framerate_mismatch_pct) {
- stream.framerate_mismatch_pct.AddSample(
- StatsSample(*framerate_mismatch_pct, time));
- }
- }
-
- return stream;
-}
-
-void VideoCodecStatsImpl::AddFrame(const Frame& frame) {
- FrameId frame_id{.timestamp_rtp = frame.timestamp_rtp,
- .spatial_idx = frame.spatial_idx};
- RTC_CHECK(frames_.find(frame_id) == frames_.end())
- << "Frame with timestamp_rtp=" << frame.timestamp_rtp
- << " and spatial_idx=" << frame.spatial_idx << " already exists";
-
- frames_[frame_id] = frame;
-}
-
-Frame* VideoCodecStatsImpl::GetFrame(uint32_t timestamp_rtp, int spatial_idx) {
- FrameId frame_id{.timestamp_rtp = timestamp_rtp, .spatial_idx = spatial_idx};
- if (frames_.find(frame_id) == frames_.end()) {
- return nullptr;
- }
- return &frames_.find(frame_id)->second;
-}
-
-} // namespace test
-} // namespace webrtc
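A small worked example of the leaky-bucket bookkeeping above, using the same numbers as the AggregateTransmissionTime unit test further down (2- and 3-byte frames one second apart, a target rate of 1 byte per second); the standalone program is illustrative only.

#include <algorithm>
#include <cstdio>

int main() {
  const double target_bps = 8.0;  // DataRate::BytesPerSec(1).
  double level_bits = 0.0;

  // Frame 0: the bucket gains 2 bytes = 16 bits; draining it at 8 bit/s
  // takes 2 s, so transmission_time_ms = 2000.
  level_bits += 2 * 8;
  std::printf("%.0f ms\n", 1000.0 * level_bits / target_bps);

  // One second later (90000 RTP ticks at 90 kHz) 8 bits have drained.
  level_bits = std::max(level_bits - target_bps * 1.0, 0.0);

  // Frame 1: 3 bytes = 24 bits arrive, the level is 32 bits, and draining
  // takes 4 s, so transmission_time_ms = 4000.
  level_bits += 3 * 8;
  std::printf("%.0f ms\n", 1000.0 * level_bits / target_bps);
  return 0;
}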
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h
deleted file mode 100644
index 77471d2ecd..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "absl/types/optional.h"
-#include "api/test/video_codec_stats.h"
-
-namespace webrtc {
-namespace test {
-
-// Implementation of `VideoCodecStats`. This class is not thread-safe.
-class VideoCodecStatsImpl : public VideoCodecStats {
- public:
- std::vector<Frame> Slice(
- absl::optional<Filter> filter = absl::nullopt) const override;
-
- Stream Aggregate(const std::vector<Frame>& frames) const override;
-
- void AddFrame(const Frame& frame);
-
-  // Returns a raw pointer to a previously added frame. If the frame does not
-  // exist, returns `nullptr`.
- Frame* GetFrame(uint32_t timestamp_rtp, int spatial_idx);
-
- private:
- struct FrameId {
- uint32_t timestamp_rtp;
- int spatial_idx;
-
- bool operator==(const FrameId& o) const {
- return timestamp_rtp == o.timestamp_rtp && spatial_idx == o.spatial_idx;
- }
-
- bool operator<(const FrameId& o) const {
- if (timestamp_rtp < o.timestamp_rtp)
- return true;
- if (timestamp_rtp == o.timestamp_rtp && spatial_idx < o.spatial_idx)
- return true;
- return false;
- }
- };
-
- std::map<FrameId, Frame> frames_;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
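The FrameId comparison above orders frames by RTP timestamp first and spatial layer second, so iterating the std::map in Slice() yields frames in capture order with lower spatial layers first. An equivalent formulation with std::tie, shown only to make the ordering explicit:

#include <cstdint>
#include <tuple>

struct FrameId {
  uint32_t timestamp_rtp;
  int spatial_idx;

  bool operator<(const FrameId& o) const {
    // Lexicographic comparison: timestamp_rtp, then spatial_idx.
    return std::tie(timestamp_rtp, spatial_idx) <
           std::tie(o.timestamp_rtp, o.spatial_idx);
  }
};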
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc
deleted file mode 100644
index ce11d5abe6..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-
-#include <tuple>
-
-#include "absl/types/optional.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::Return;
-using ::testing::Values;
-using Filter = VideoCodecStats::Filter;
-using Frame = VideoCodecStatsImpl::Frame;
-using Stream = VideoCodecStats::Stream;
-} // namespace
-
-TEST(VideoCodecStatsImpl, AddAndGetFrame) {
- VideoCodecStatsImpl stats;
- stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 0});
- stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 1});
- stats.AddFrame({.timestamp_rtp = 1, .spatial_idx = 0});
-
- Frame* fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/0);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 0u);
- EXPECT_EQ(fs->spatial_idx, 0);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/1);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 0u);
- EXPECT_EQ(fs->spatial_idx, 1);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/0);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 1u);
- EXPECT_EQ(fs->spatial_idx, 0);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/1);
- EXPECT_EQ(fs, nullptr);
-}
-
-class VideoCodecStatsImplSlicingTest
- : public ::testing::TestWithParam<std::tuple<Filter, std::vector<int>>> {};
-
-TEST_P(VideoCodecStatsImplSlicingTest, Slice) {
- Filter filter = std::get<0>(GetParam());
- std::vector<int> expected_frames = std::get<1>(GetParam());
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 0, .temporal_idx = 0},
- {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 1, .temporal_idx = 0},
- {.frame_num = 1, .timestamp_rtp = 1, .spatial_idx = 0, .temporal_idx = 1},
- {.frame_num = 1,
- .timestamp_rtp = 1,
- .spatial_idx = 1,
- .temporal_idx = 1}};
-
- VideoCodecStatsImpl stats;
- stats.AddFrame(frames[0]);
- stats.AddFrame(frames[1]);
- stats.AddFrame(frames[2]);
- stats.AddFrame(frames[3]);
-
- std::vector<VideoCodecStats::Frame> slice = stats.Slice(filter);
- ASSERT_EQ(slice.size(), expected_frames.size());
- for (size_t i = 0; i < expected_frames.size(); ++i) {
- Frame& expected = frames[expected_frames[i]];
- EXPECT_EQ(slice[i].frame_num, expected.frame_num);
- EXPECT_EQ(slice[i].timestamp_rtp, expected.timestamp_rtp);
- EXPECT_EQ(slice[i].spatial_idx, expected.spatial_idx);
- EXPECT_EQ(slice[i].temporal_idx, expected.temporal_idx);
- }
-}
-
-INSTANTIATE_TEST_SUITE_P(
- All,
- VideoCodecStatsImplSlicingTest,
- ::testing::Values(
- std::make_tuple(Filter{}, std::vector<int>{0, 1, 2, 3}),
- std::make_tuple(Filter{.first_frame = 1}, std::vector<int>{2, 3}),
- std::make_tuple(Filter{.last_frame = 0}, std::vector<int>{0, 1}),
- std::make_tuple(Filter{.spatial_idx = 0}, std::vector<int>{0, 2}),
- std::make_tuple(Filter{.temporal_idx = 1},
- std::vector<int>{0, 1, 2, 3})));
-
-TEST(VideoCodecStatsImpl, AggregateBitrate) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(1000),
- .target_bitrate = DataRate::BytesPerSec(1000)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Bytes(2000),
- .target_bitrate = DataRate::BytesPerSec(1000)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- EXPECT_EQ(stream.encoded_bitrate_kbps.GetAverage(), 12.0);
- EXPECT_EQ(stream.bitrate_mismatch_pct.GetAverage(), 50.0);
-}
-
-TEST(VideoCodecStatsImpl, AggregateFramerate) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(1),
- .target_framerate = Frequency::Hertz(1)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Zero(),
- .target_framerate = Frequency::Hertz(1)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- EXPECT_EQ(stream.encoded_framerate_fps.GetAverage(), 0.5);
- EXPECT_EQ(stream.framerate_mismatch_pct.GetAverage(), -50.0);
-}
-
-TEST(VideoCodecStatsImpl, AggregateTransmissionTime) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(2),
- .target_bitrate = DataRate::BytesPerSec(1)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Bytes(3),
- .target_bitrate = DataRate::BytesPerSec(1)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- ASSERT_EQ(stream.transmission_time_ms.NumSamples(), 2);
- ASSERT_EQ(stream.transmission_time_ms.GetSamples()[0], 2000);
- ASSERT_EQ(stream.transmission_time_ms.GetSamples()[1], 4000);
-}
-
-} // namespace test
-} // namespace webrtc
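The expectations in AggregateBitrate and AggregateFramerate above follow from the aggregation code in video_codec_stats_impl.cc; a short sketch of the arithmetic with the same two-frame, two-second streams (the duration is the 1 s between the frames plus the assumed duration of the last frame):

#include <cstdio>

int main() {
  // AggregateBitrate: 1000 + 2000 bytes encoded over 2 s.
  const double encoded_bps = (1000 + 2000) * 8 / 2.0;  // 12000 bps.
  const double target_bps = 1000 * 8;                  // 8000 bps.
  std::printf("bitrate %.1f kbps, mismatch %.0f%%\n", encoded_bps / 1000.0,
              100.0 * (encoded_bps - target_bps) / target_bps);  // 12.0, 50.

  // AggregateFramerate: only the first frame has a non-zero size, so one
  // encoded frame over 2 s against a 1 Hz target.
  const double encoded_fps = 1.0 / 2.0;
  std::printf("framerate %.1f fps, mismatch %.0f%%\n", encoded_fps,
              100.0 * (encoded_fps - 1.0) / 1.0);  // 0.5, -50.
  return 0;
}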
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
index 1c8fe97e84..60c2fcbb6e 100644
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
+++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
@@ -8,41 +8,62 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "api/video_codecs/video_codec.h"
-
-#include <cstddef>
#include <memory>
#include <string>
#include <vector>
#include "absl/flags/flag.h"
#include "absl/functional/any_invocable.h"
-#include "api/test/create_video_codec_tester.h"
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
-#include "api/test/video_codec_tester.h"
-#include "api/test/videocodec_test_stats.h"
#include "api/units/data_rate.h"
#include "api/units/frequency.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
#include "api/video/resolution.h"
-#include "api/video/video_frame.h"
-#include "api/video_codecs/scalability_mode.h"
-#include "api/video_codecs/video_decoder.h"
-#include "api/video_codecs/video_encoder.h"
-#include "media/engine/internal_decoder_factory.h"
-#include "media/engine/internal_encoder_factory.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/video_coding/include/video_error_codes.h"
-#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
#if defined(WEBRTC_ANDROID)
#include "modules/video_coding/codecs/test/android_codec_factory_helper.h"
#endif
+#include "modules/video_coding/svc/scalability_mode_util.h"
#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
#include "test/gtest.h"
#include "test/test_flags.h"
#include "test/testsupport/file_utils.h"
-#include "test/testsupport/frame_reader.h"
+#include "test/video_codec_tester.h"
+
+ABSL_FLAG(std::string,
+ video_name,
+ "FourPeople_1280x720_30",
+ "Name of input video sequence.");
+ABSL_FLAG(std::string,
+ encoder,
+ "libaom-av1",
+ "Encoder: libaom-av1, libvpx-vp9, libvpx-vp8, openh264, hw-vp8, "
+ "hw-vp9, hw-av1, hw-h264, hw-h265");
+ABSL_FLAG(std::string,
+ decoder,
+ "dav1d",
+ "Decoder: dav1d, libvpx-vp9, libvpx-vp8, ffmpeg-h264, hw-vp8, "
+ "hw-vp9, hw-av1, hw-h264, hw-h265");
+ABSL_FLAG(std::string, scalability_mode, "L1T1", "Scalability mode.");
+ABSL_FLAG(int, width, 1280, "Width.");
+ABSL_FLAG(int, height, 720, "Height.");
+ABSL_FLAG(std::vector<std::string>,
+ bitrate_kbps,
+ {"1024"},
+ "Encode target bitrate per layer (l0t0,l0t1,...l1t0,l1t1 and so on) "
+ "in kbps.");
+ABSL_FLAG(double,
+ framerate_fps,
+ 30.0,
+ "Encode target frame rate of the top temporal layer in fps.");
+ABSL_FLAG(int, num_frames, 300, "Number of frames to encode and/or decode.");
+ABSL_FLAG(std::string, test_name, "", "Test name.");
+ABSL_FLAG(bool, dump_decoder_input, false, "Dump decoder input.");
+ABSL_FLAG(bool, dump_decoder_output, false, "Dump decoder output.");
+ABSL_FLAG(bool, dump_encoder_input, false, "Dump encoder input.");
+ABSL_FLAG(bool, dump_encoder_output, false, "Dump encoder output.");
+ABSL_FLAG(bool, write_csv, false, "Write metrics to a CSV file.");
namespace webrtc {
namespace test {
@@ -50,6 +71,10 @@ namespace test {
namespace {
using ::testing::Combine;
using ::testing::Values;
+using VideoSourceSettings = VideoCodecTester::VideoSourceSettings;
+using EncodingSettings = VideoCodecTester::EncodingSettings;
+using VideoCodecStats = VideoCodecTester::VideoCodecStats;
+using Filter = VideoCodecStats::Filter;
using PacingMode = VideoCodecTester::PacingSettings::PacingMode;
struct VideoInfo {
@@ -58,405 +83,93 @@ struct VideoInfo {
Frequency framerate;
};
-struct LayerId {
- int spatial_idx;
- int temporal_idx;
-
- bool operator==(const LayerId& o) const {
- return spatial_idx == o.spatial_idx && temporal_idx == o.temporal_idx;
- }
-
- bool operator<(const LayerId& o) const {
- if (spatial_idx < o.spatial_idx)
- return true;
- if (spatial_idx == o.spatial_idx && temporal_idx < o.temporal_idx)
- return true;
- return false;
- }
-};
-
-struct EncodingSettings {
- ScalabilityMode scalability_mode;
- struct LayerSettings {
- Resolution resolution;
- Frequency framerate;
- DataRate bitrate;
- };
- std::map<LayerId, LayerSettings> layer_settings;
-
- bool IsSameSettings(const EncodingSettings& other) const {
- if (scalability_mode != other.scalability_mode) {
- return false;
- }
-
- for (auto [layer_id, layer] : layer_settings) {
- const auto& other_layer = other.layer_settings.at(layer_id);
- if (layer.resolution != other_layer.resolution) {
- return false;
- }
- }
-
- return true;
- }
-
- bool IsSameRate(const EncodingSettings& other) const {
- for (auto [layer_id, layer] : layer_settings) {
- const auto& other_layer = other.layer_settings.at(layer_id);
- if (layer.bitrate != other_layer.bitrate ||
- layer.framerate != other_layer.framerate) {
- return false;
- }
- }
-
- return true;
- }
-};
-
-const VideoInfo kFourPeople_1280x720_30 = {
- .name = "FourPeople_1280x720_30",
- .resolution = {.width = 1280, .height = 720},
- .framerate = Frequency::Hertz(30)};
-
-class TestRawVideoSource : public VideoCodecTester::RawVideoSource {
- public:
- static constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
- TestRawVideoSource(VideoInfo video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames)
- : video_info_(video_info),
- frame_settings_(frame_settings),
- num_frames_(num_frames),
- frame_num_(0),
- // Start with non-zero timestamp to force using frame RTP timestamps in
- // IvfFrameWriter.
- timestamp_rtp_(90000) {
- // Ensure settings for the first frame are provided.
- RTC_CHECK_GT(frame_settings_.size(), 0u);
- RTC_CHECK_EQ(frame_settings_.begin()->first, 0);
-
- frame_reader_ = CreateYuvFrameReader(
- ResourcePath(video_info_.name, "yuv"), video_info_.resolution,
- YuvFrameReaderImpl::RepeatMode::kPingPong);
- RTC_CHECK(frame_reader_);
- }
-
-  // Pulls the next frame. The frame RTP timestamp is set according to
-  // `EncodingSettings::framerate`.
- absl::optional<VideoFrame> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt; // End of stream.
- }
-
- const EncodingSettings& encoding_settings =
- std::prev(frame_settings_.upper_bound(frame_num_))->second;
-
- Resolution resolution =
- encoding_settings.layer_settings.begin()->second.resolution;
- Frequency framerate =
- encoding_settings.layer_settings.begin()->second.framerate;
-
- int pulled_frame;
- auto buffer = frame_reader_->PullFrame(
- &pulled_frame, resolution,
- {.num = static_cast<int>(framerate.millihertz()),
- .den = static_cast<int>(video_info_.framerate.millihertz())});
- RTC_CHECK(buffer) << "Cannot pull frame " << frame_num_;
-
- auto frame = VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp_)
- .set_timestamp_us((timestamp_rtp_ / k90kHz).us())
- .build();
-
- pulled_frames_[timestamp_rtp_] = pulled_frame;
- timestamp_rtp_ += k90kHz / framerate;
- ++frame_num_;
-
- return frame;
- }
-
-  // Reads the frame specified by `timestamp_rtp`, scales it to `resolution`
-  // and returns it. The frame with the given `timestamp_rtp` is expected to
-  // have been pulled before.
- VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override {
- RTC_CHECK(pulled_frames_.find(timestamp_rtp) != pulled_frames_.end())
- << "Frame with RTP timestamp " << timestamp_rtp
- << " was not pulled before";
- auto buffer =
- frame_reader_->ReadFrame(pulled_frames_[timestamp_rtp], resolution);
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
- }
-
- protected:
- VideoInfo video_info_;
- std::unique_ptr<FrameReader> frame_reader_;
- const std::map<int, EncodingSettings>& frame_settings_;
- int num_frames_;
- int frame_num_;
- uint32_t timestamp_rtp_;
- std::map<uint32_t, int> pulled_frames_;
-};
-
-class TestEncoder : public VideoCodecTester::Encoder,
- public EncodedImageCallback {
- public:
- TestEncoder(std::unique_ptr<VideoEncoder> encoder,
- const std::string codec_type,
- const std::map<int, EncodingSettings>& frame_settings)
- : encoder_(std::move(encoder)),
- codec_type_(codec_type),
- frame_settings_(frame_settings),
- frame_num_(0) {
-    // Ensure settings for the first frame are provided.
- RTC_CHECK_GT(frame_settings_.size(), 0u);
- RTC_CHECK_EQ(frame_settings_.begin()->first, 0);
-
- encoder_->RegisterEncodeCompleteCallback(this);
- }
-
- void Initialize() override {
- const EncodingSettings& first_frame_settings = frame_settings_.at(0);
- Configure(first_frame_settings);
- SetRates(first_frame_settings);
- }
-
- void Encode(const VideoFrame& frame, EncodeCallback callback) override {
- {
- MutexLock lock(&mutex_);
- callbacks_[frame.timestamp()] = std::move(callback);
- }
-
- if (auto fs = frame_settings_.find(frame_num_);
- fs != frame_settings_.begin() && fs != frame_settings_.end()) {
- if (!fs->second.IsSameSettings(std::prev(fs)->second)) {
- Configure(fs->second);
- } else if (!fs->second.IsSameRate(std::prev(fs)->second)) {
- SetRates(fs->second);
- }
- }
-
- encoder_->Encode(frame, nullptr);
- ++frame_num_;
- }
-
- void Flush() override {
-    // TODO(webrtc:14852): For codecs which buffer frames we need a way to
-    // flush them to get the last frames. Add such functionality to the
-    // VideoEncoder API. On Android it will map directly to
-    // `MediaCodec.flush()`.
- encoder_->Release();
- }
-
- VideoEncoder* encoder() { return encoder_.get(); }
-
- protected:
- Result OnEncodedImage(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info) override {
- MutexLock lock(&mutex_);
- auto cb = callbacks_.find(encoded_image.RtpTimestamp());
- RTC_CHECK(cb != callbacks_.end());
- cb->second(encoded_image);
-
- callbacks_.erase(callbacks_.begin(), cb);
- return Result(Result::Error::OK);
- }
-
- void Configure(const EncodingSettings& es) {
- VideoCodec vc;
- const EncodingSettings::LayerSettings& layer_settings =
- es.layer_settings.begin()->second;
- vc.width = layer_settings.resolution.width;
- vc.height = layer_settings.resolution.height;
- const DataRate& bitrate = layer_settings.bitrate;
- vc.startBitrate = bitrate.kbps();
- vc.maxBitrate = bitrate.kbps();
- vc.minBitrate = 0;
- vc.maxFramerate = static_cast<uint32_t>(layer_settings.framerate.hertz());
- vc.active = true;
- vc.qpMax = 63;
- vc.numberOfSimulcastStreams = 0;
- vc.mode = webrtc::VideoCodecMode::kRealtimeVideo;
- vc.SetFrameDropEnabled(true);
- vc.SetScalabilityMode(es.scalability_mode);
-
- vc.codecType = PayloadStringToCodecType(codec_type_);
- if (vc.codecType == kVideoCodecVP8) {
- *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings();
- } else if (vc.codecType == kVideoCodecVP9) {
- *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings();
- } else if (vc.codecType == kVideoCodecH264) {
- *(vc.H264()) = VideoEncoder::GetDefaultH264Settings();
- }
-
- VideoEncoder::Settings ves(
- VideoEncoder::Capabilities(/*loss_notification=*/false),
- /*number_of_cores=*/1,
- /*max_payload_size=*/1440);
-
- int result = encoder_->InitEncode(&vc, ves);
- ASSERT_EQ(result, WEBRTC_VIDEO_CODEC_OK);
-
- SetRates(es);
- }
-
- void SetRates(const EncodingSettings& es) {
- VideoEncoder::RateControlParameters rc;
- int num_spatial_layers =
- ScalabilityModeToNumSpatialLayers(es.scalability_mode);
-    int num_temporal_layers =
-        ScalabilityModeToNumTemporalLayers(es.scalability_mode);
- for (int sidx = 0; sidx < num_spatial_layers; ++sidx) {
- for (int tidx = 0; tidx < num_temporal_layers; ++tidx) {
- auto layer_settings =
- es.layer_settings.find({.spatial_idx = sidx, .temporal_idx = tidx});
- RTC_CHECK(layer_settings != es.layer_settings.end())
- << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set";
- rc.bitrate.SetBitrate(sidx, tidx, layer_settings->second.bitrate.bps());
- }
- }
-
- rc.framerate_fps =
- es.layer_settings.begin()->second.framerate.millihertz() / 1000.0;
- encoder_->SetRates(rc);
- }
-
- std::unique_ptr<VideoEncoder> encoder_;
- const std::string codec_type_;
- const std::map<int, EncodingSettings>& frame_settings_;
- int frame_num_;
- std::map<uint32_t, EncodeCallback> callbacks_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-class TestDecoder : public VideoCodecTester::Decoder,
- public DecodedImageCallback {
- public:
- TestDecoder(std::unique_ptr<VideoDecoder> decoder,
- const std::string codec_type)
- : decoder_(std::move(decoder)), codec_type_(codec_type) {
- decoder_->RegisterDecodeCompleteCallback(this);
- }
-
- void Initialize() override {
- VideoDecoder::Settings ds;
- ds.set_codec_type(PayloadStringToCodecType(codec_type_));
- ds.set_number_of_cores(1);
- ds.set_max_render_resolution({1280, 720});
-
- bool result = decoder_->Configure(ds);
- ASSERT_TRUE(result);
- }
-
- void Decode(const EncodedImage& frame, DecodeCallback callback) override {
- {
- MutexLock lock(&mutex_);
- callbacks_[frame.RtpTimestamp()] = std::move(callback);
- }
-
- decoder_->Decode(frame, /*render_time_ms=*/0);
- }
-
- void Flush() override {
-    // TODO(webrtc:14852): For codecs which buffer frames we need a way to
-    // flush them to get the last frames. Add such functionality to the
-    // VideoDecoder API. On Android it will map directly to
-    // `MediaCodec.flush()`.
- decoder_->Release();
- }
-
- VideoDecoder* decoder() { return decoder_.get(); }
-
- protected:
- int Decoded(VideoFrame& decoded_frame) override {
- MutexLock lock(&mutex_);
- auto cb = callbacks_.find(decoded_frame.timestamp());
- RTC_CHECK(cb != callbacks_.end());
- cb->second(decoded_frame);
+const std::map<std::string, VideoInfo> kRawVideos = {
+ {"FourPeople_1280x720_30",
+ {.name = "FourPeople_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"vidyo1_1280x720_30",
+ {.name = "vidyo1_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"vidyo4_1280x720_30",
+ {.name = "vidyo4_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"KristenAndSara_1280x720_30",
+ {.name = "KristenAndSara_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"Johnny_1280x720_30",
+ {.name = "Johnny_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}}};
+
+static constexpr Frequency k90kHz = Frequency::Hertz(90000);
+
+std::string CodecNameToCodecType(std::string name) {
+ if (name.find("av1") != std::string::npos) {
+ return "AV1";
+ }
+ if (name.find("vp9") != std::string::npos) {
+ return "VP9";
+ }
+ if (name.find("vp8") != std::string::npos) {
+ return "VP8";
+ }
+ if (name.find("h264") != std::string::npos) {
+ return "H264";
+ }
+ if (name.find("h265") != std::string::npos) {
+ return "H265";
+ }
+ RTC_CHECK_NOTREACHED();
+}
- callbacks_.erase(callbacks_.begin(), cb);
- return WEBRTC_VIDEO_CODEC_OK;
+// TODO(webrtc:14852): Make Create[Encoder,Decoder]Factory work with the codec
+// name directly.
+std::string CodecNameToCodecImpl(std::string name) {
+ if (name.find("hw") != std::string::npos) {
+ return "mediacodec";
}
-
- std::unique_ptr<VideoDecoder> decoder_;
- const std::string codec_type_;
- std::map<uint32_t, DecodeCallback> callbacks_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-std::unique_ptr<TestRawVideoSource> CreateVideoSource(
- const VideoInfo& video,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames) {
- return std::make_unique<TestRawVideoSource>(video, frame_settings,
- num_frames);
+ return "builtin";
}
-std::unique_ptr<TestEncoder> CreateEncoder(
- std::string type,
- std::string impl,
- const std::map<int, EncodingSettings>& frame_settings) {
- std::unique_ptr<VideoEncoderFactory> factory;
+std::unique_ptr<VideoEncoderFactory> CreateEncoderFactory(std::string impl) {
if (impl == "builtin") {
- factory = std::make_unique<InternalEncoderFactory>();
- } else if (impl == "mediacodec") {
+ return CreateBuiltinVideoEncoderFactory();
+ }
#if defined(WEBRTC_ANDROID)
- InitializeAndroidObjects();
- factory = CreateAndroidEncoderFactory();
+ InitializeAndroidObjects();
+ return CreateAndroidEncoderFactory();
+#else
+ return nullptr;
#endif
- }
- std::unique_ptr<VideoEncoder> encoder =
- factory->CreateVideoEncoder(SdpVideoFormat(type));
- if (encoder == nullptr) {
- return nullptr;
- }
- return std::make_unique<TestEncoder>(std::move(encoder), type,
- frame_settings);
}
-std::unique_ptr<TestDecoder> CreateDecoder(std::string type, std::string impl) {
- std::unique_ptr<VideoDecoderFactory> factory;
+std::unique_ptr<VideoDecoderFactory> CreateDecoderFactory(std::string impl) {
if (impl == "builtin") {
- factory = std::make_unique<InternalDecoderFactory>();
- } else if (impl == "mediacodec") {
+ return CreateBuiltinVideoDecoderFactory();
+ }
#if defined(WEBRTC_ANDROID)
- InitializeAndroidObjects();
- factory = CreateAndroidDecoderFactory();
+ InitializeAndroidObjects();
+ return CreateAndroidDecoderFactory();
+#else
+ return nullptr;
#endif
- }
- std::unique_ptr<VideoDecoder> decoder =
- factory->CreateVideoDecoder(SdpVideoFormat(type));
- if (decoder == nullptr) {
- return nullptr;
- }
- return std::make_unique<TestDecoder>(std::move(decoder), type);
}
-void SetTargetRates(const std::map<int, EncodingSettings>& frame_settings,
- std::vector<VideoCodecStats::Frame>& frames) {
- for (VideoCodecStats::Frame& f : frames) {
- const EncodingSettings& encoding_settings =
- std::prev(frame_settings.upper_bound(f.frame_num))->second;
- LayerId layer_id = {.spatial_idx = f.spatial_idx,
- .temporal_idx = f.temporal_idx};
- RTC_CHECK(encoding_settings.layer_settings.find(layer_id) !=
- encoding_settings.layer_settings.end())
- << "Frame frame_num=" << f.frame_num
- << " belongs to spatial_idx=" << f.spatial_idx
- << " temporal_idx=" << f.temporal_idx
- << " but settings for this layer are not provided.";
- const EncodingSettings::LayerSettings& layer_settings =
- encoding_settings.layer_settings.at(layer_id);
- f.target_bitrate = layer_settings.bitrate;
- f.target_framerate = layer_settings.framerate;
+std::string TestName() {
+ std::string test_name = absl::GetFlag(FLAGS_test_name);
+ if (!test_name.empty()) {
+ return test_name;
}
+ return ::testing::UnitTest::GetInstance()->current_test_info()->name();
}
std::string TestOutputPath() {
std::string output_path =
- OutputPath() +
- ::testing::UnitTest::GetInstance()->current_test_info()->name();
+ (rtc::StringBuilder() << OutputPath() << TestName()).str();
std::string output_dir = DirName(output_path);
bool result = CreateDir(output_dir);
RTC_CHECK(result) << "Cannot create " << output_dir;
@@ -465,116 +178,120 @@ std::string TestOutputPath() {
} // namespace
std::unique_ptr<VideoCodecStats> RunEncodeDecodeTest(
- std::string codec_type,
std::string codec_impl,
const VideoInfo& video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames,
- bool save_codec_input,
- bool save_codec_output) {
- std::unique_ptr<TestRawVideoSource> video_source =
- CreateVideoSource(video_info, frame_settings, num_frames);
-
- std::unique_ptr<TestEncoder> encoder =
- CreateEncoder(codec_type, codec_impl, frame_settings);
- if (encoder == nullptr) {
+ const std::map<uint32_t, EncodingSettings>& encoding_settings) {
+ VideoSourceSettings source_settings{
+ .file_path = ResourcePath(video_info.name, "yuv"),
+ .resolution = video_info.resolution,
+ .framerate = video_info.framerate};
+
+ const SdpVideoFormat& sdp_video_format =
+ encoding_settings.begin()->second.sdp_video_format;
+
+ std::unique_ptr<VideoEncoderFactory> encoder_factory =
+ CreateEncoderFactory(codec_impl);
+ if (!encoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*scalability_mode=*/absl::nullopt)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No encoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
- std::unique_ptr<TestDecoder> decoder = CreateDecoder(codec_type, codec_impl);
- if (decoder == nullptr) {
- // If platform decoder is not available try built-in one.
- if (codec_impl == "builtin") {
- return nullptr;
- }
-
- decoder = CreateDecoder(codec_type, "builtin");
- if (decoder == nullptr) {
+ std::unique_ptr<VideoDecoderFactory> decoder_factory =
+ CreateDecoderFactory(codec_impl);
+ if (!decoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*reference_scaling=*/false)
+ .is_supported) {
+ decoder_factory = CreateDecoderFactory("builtin");
+ if (!decoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*reference_scaling=*/false)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No decoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
}
- RTC_LOG(LS_INFO) << "Encoder implementation: "
- << encoder->encoder()->GetEncoderInfo().implementation_name;
- RTC_LOG(LS_INFO) << "Decoder implementation: "
- << decoder->decoder()->GetDecoderInfo().implementation_name;
+ std::string output_path = TestOutputPath();
VideoCodecTester::EncoderSettings encoder_settings;
- encoder_settings.pacing.mode =
- encoder->encoder()->GetEncoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
+ encoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_encoder_input)) {
+ encoder_settings.encoder_input_base_path = output_path + "_enc_input";
+ }
+ if (absl::GetFlag(FLAGS_dump_encoder_output)) {
+ encoder_settings.encoder_output_base_path = output_path + "_enc_output";
+ }
VideoCodecTester::DecoderSettings decoder_settings;
- decoder_settings.pacing.mode =
- decoder->decoder()->GetDecoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
-
- std::string output_path = TestOutputPath();
- if (save_codec_input) {
- encoder_settings.encoder_input_base_path = output_path + "_enc_input";
+ decoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_decoder_input)) {
decoder_settings.decoder_input_base_path = output_path + "_dec_input";
}
- if (save_codec_output) {
- encoder_settings.encoder_output_base_path = output_path + "_enc_output";
+ if (absl::GetFlag(FLAGS_dump_decoder_output)) {
decoder_settings.decoder_output_base_path = output_path + "_dec_output";
}
- std::unique_ptr<VideoCodecTester> tester = CreateVideoCodecTester();
- return tester->RunEncodeDecodeTest(video_source.get(), encoder.get(),
- decoder.get(), encoder_settings,
- decoder_settings);
+ return VideoCodecTester::RunEncodeDecodeTest(
+ source_settings, encoder_factory.get(), decoder_factory.get(),
+ encoder_settings, decoder_settings, encoding_settings);
}
std::unique_ptr<VideoCodecStats> RunEncodeTest(
std::string codec_type,
std::string codec_impl,
const VideoInfo& video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames,
- bool save_codec_input,
- bool save_codec_output) {
- std::unique_ptr<TestRawVideoSource> video_source =
- CreateVideoSource(video_info, frame_settings, num_frames);
-
- std::unique_ptr<TestEncoder> encoder =
- CreateEncoder(codec_type, codec_impl, frame_settings);
- if (encoder == nullptr) {
+ const std::map<uint32_t, EncodingSettings>& encoding_settings) {
+ VideoSourceSettings source_settings{
+ .file_path = ResourcePath(video_info.name, "yuv"),
+ .resolution = video_info.resolution,
+ .framerate = video_info.framerate};
+
+ const SdpVideoFormat& sdp_video_format =
+ encoding_settings.begin()->second.sdp_video_format;
+
+ std::unique_ptr<VideoEncoderFactory> encoder_factory =
+ CreateEncoderFactory(codec_impl);
+ if (!encoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*scalability_mode=*/absl::nullopt)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No encoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
- RTC_LOG(LS_INFO) << "Encoder implementation: "
- << encoder->encoder()->GetEncoderInfo().implementation_name;
-
- VideoCodecTester::EncoderSettings encoder_settings;
- encoder_settings.pacing.mode =
- encoder->encoder()->GetEncoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
-
std::string output_path = TestOutputPath();
- if (save_codec_input) {
+ VideoCodecTester::EncoderSettings encoder_settings;
+ encoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_encoder_input)) {
encoder_settings.encoder_input_base_path = output_path + "_enc_input";
}
- if (save_codec_output) {
+ if (absl::GetFlag(FLAGS_dump_encoder_output)) {
encoder_settings.encoder_output_base_path = output_path + "_enc_output";
}
- std::unique_ptr<VideoCodecTester> tester = CreateVideoCodecTester();
- return tester->RunEncodeTest(video_source.get(), encoder.get(),
- encoder_settings);
+ return VideoCodecTester::RunEncodeTest(source_settings, encoder_factory.get(),
+ encoder_settings, encoding_settings);
}
-class SpatialQualityTest : public ::testing::TestWithParam<
- std::tuple</*codec_type=*/std::string,
- /*codec_impl=*/std::string,
- VideoInfo,
- std::tuple</*width=*/int,
- /*height=*/int,
- /*framerate_fps=*/double,
- /*bitrate_kbps=*/int,
- /*min_psnr=*/double>>> {
+class SpatialQualityTest : public ::testing::TestWithParam<std::tuple<
+ /*codec_type=*/std::string,
+ /*codec_impl=*/std::string,
+ VideoInfo,
+ std::tuple</*width=*/int,
+ /*height=*/int,
+ /*framerate_fps=*/double,
+ /*bitrate_kbps=*/int,
+ /*expected_min_psnr=*/double>>> {
public:
static std::string TestParamsToString(
const ::testing::TestParamInfo<SpatialQualityTest::ParamType>& info) {
@@ -590,41 +307,35 @@ class SpatialQualityTest : public ::testing::TestWithParam<
TEST_P(SpatialQualityTest, SpatialQuality) {
auto [codec_type, codec_impl, video_info, coding_settings] = GetParam();
- auto [width, height, framerate_fps, bitrate_kbps, psnr] = coding_settings;
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.scalability_mode = ScalabilityMode::kL1T1,
- .layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = width, .height = height},
- .framerate = Frequency::MilliHertz(1000 * framerate_fps),
- .bitrate = DataRate::KilobitsPerSec(bitrate_kbps)}}}}}};
-
+ auto [width, height, framerate_fps, bitrate_kbps, expected_min_psnr] =
+ coding_settings;
int duration_s = 10;
int num_frames = duration_s * framerate_fps;
- std::unique_ptr<VideoCodecStats> stats = RunEncodeDecodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+ std::map<uint32_t, EncodingSettings> frames_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1", width, height,
+ {bitrate_kbps}, framerate_fps, num_frames);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeDecodeTest(codec_impl, video_info, frames_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames = stats->Slice();
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate(Filter{});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
- EXPECT_GE(stream.psnr.y.GetAverage(), psnr);
+ EXPECT_GE(stream.psnr.y.GetAverage(), expected_min_psnr);
}
}
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
- {{"codec_type", codec_type},
- {"codec_impl", codec_impl},
- {"video_name", video_info.name}});
+ {{"video_name", video_info.name},
+ {"codec_type", codec_type},
+ {"codec_impl", codec_impl}});
}
INSTANTIATE_TEST_SUITE_P(
@@ -636,7 +347,7 @@ INSTANTIATE_TEST_SUITE_P(
#else
Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
Values(std::make_tuple(320, 180, 30, 32, 28),
std::make_tuple(320, 180, 30, 64, 30),
std::make_tuple(320, 180, 30, 128, 33),
@@ -671,33 +382,32 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
auto [codec_type, codec_impl, video_info, bitrate_kbps] = GetParam();
int duration_s = 10; // Duration of fixed rate interval.
- int first_frame = duration_s * video_info.framerate.millihertz() / 1000;
- int num_frames = 2 * duration_s * video_info.framerate.millihertz() / 1000;
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = video_info.framerate,
- .bitrate = DataRate::KilobitsPerSec(
- bitrate_kbps.first)}}}}},
- {first_frame,
- {.layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = video_info.framerate,
- .bitrate = DataRate::KilobitsPerSec(bitrate_kbps.second)}}}}}};
-
- std::unique_ptr<VideoCodecStats> stats = RunEncodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+ int num_frames =
+ static_cast<int>(duration_s * video_info.framerate.hertz<double>());
+
+ std::map<uint32_t, EncodingSettings> encoding_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360, {bitrate_kbps.first},
+ /*framerate_fps=*/30, num_frames);
+
+ uint32_t initial_timestamp_rtp =
+ encoding_settings.rbegin()->first + k90kHz / Frequency::Hertz(30);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings2 =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360, {bitrate_kbps.second},
+ /*framerate_fps=*/30, num_frames, initial_timestamp_rtp);
+
+ encoding_settings.merge(encoding_settings2);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeTest(codec_type, codec_impl, video_info, encoding_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames =
- stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame});
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate({.min_timestamp_rtp = initial_timestamp_rtp});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10);
EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10);
@@ -707,6 +417,7 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
{{"codec_type", codec_type},
{"codec_impl", codec_impl},
@@ -715,18 +426,18 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
std::to_string(bitrate_kbps.second)}});
}
-INSTANTIATE_TEST_SUITE_P(All,
- BitrateAdaptationTest,
- Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ BitrateAdaptationTest,
+ Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
#if defined(WEBRTC_ANDROID)
- Values("builtin", "mediacodec"),
+ Values("builtin", "mediacodec"),
#else
- Values("builtin"),
+ Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
- Values(std::pair(1024, 512),
- std::pair(512, 1024))),
- BitrateAdaptationTest::TestParamsToString);
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
+ Values(std::pair(1024, 512), std::pair(512, 1024))),
+ BitrateAdaptationTest::TestParamsToString);
class FramerateAdaptationTest
: public ::testing::TestWithParam<std::tuple</*codec_type=*/std::string,
@@ -749,34 +460,34 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
auto [codec_type, codec_impl, video_info, framerate_fps] = GetParam();
int duration_s = 10; // Duration of fixed rate interval.
- int first_frame = static_cast<int>(duration_s * framerate_fps.first);
- int num_frames = static_cast<int>(
- duration_s * (framerate_fps.first + framerate_fps.second));
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = Frequency::MilliHertz(
- 1000 * framerate_fps.first),
- .bitrate = DataRate::KilobitsPerSec(512)}}}}},
- {first_frame,
- {.layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = Frequency::MilliHertz(1000 * framerate_fps.second),
- .bitrate = DataRate::KilobitsPerSec(512)}}}}}};
-
- std::unique_ptr<VideoCodecStats> stats = RunEncodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360,
+ /*layer_bitrates_kbps=*/{512}, framerate_fps.first,
+ static_cast<int>(duration_s * framerate_fps.first));
+
+ uint32_t initial_timestamp_rtp =
+ encoding_settings.rbegin()->first +
+ k90kHz / Frequency::Hertz(framerate_fps.first);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings2 =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1", /*width=*/640,
+ /*height=*/360,
+ /*layer_bitrates_kbps=*/{512}, framerate_fps.second,
+ static_cast<int>(duration_s * framerate_fps.second),
+ initial_timestamp_rtp);
+
+ encoding_settings.merge(encoding_settings2);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeTest(codec_type, codec_impl, video_info, encoding_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames =
- stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame});
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate({.min_timestamp_rtp = initial_timestamp_rtp});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10);
EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10);
@@ -786,6 +497,7 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
{{"codec_type", codec_type},
{"codec_impl", codec_impl},
@@ -794,17 +506,71 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
std::to_string(framerate_fps.second)}});
}
-INSTANTIATE_TEST_SUITE_P(All,
- FramerateAdaptationTest,
- Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ FramerateAdaptationTest,
+ Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
#if defined(WEBRTC_ANDROID)
- Values("builtin", "mediacodec"),
+ Values("builtin", "mediacodec"),
#else
- Values("builtin"),
+ Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
- Values(std::pair(30, 15), std::pair(15, 30))),
- FramerateAdaptationTest::TestParamsToString);
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
+ Values(std::pair(30, 15), std::pair(15, 30))),
+ FramerateAdaptationTest::TestParamsToString);
+
+TEST(VideoCodecTest, DISABLED_EncodeDecode) {
+ std::vector<std::string> bitrate_str = absl::GetFlag(FLAGS_bitrate_kbps);
+ std::vector<int> bitrate_kbps;
+ std::transform(bitrate_str.begin(), bitrate_str.end(),
+ std::back_inserter(bitrate_kbps),
+ [](const std::string& str) { return std::stoi(str); });
+
+ std::map<uint32_t, EncodingSettings> frames_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ CodecNameToCodecType(absl::GetFlag(FLAGS_encoder)),
+ absl::GetFlag(FLAGS_scalability_mode), absl::GetFlag(FLAGS_width),
+ absl::GetFlag(FLAGS_height), {bitrate_kbps},
+ absl::GetFlag(FLAGS_framerate_fps), absl::GetFlag(FLAGS_num_frames));
+
+ // TODO(webrtc:14852): Pass encoder and decoder names directly, and update
+  // logged test name (implies losing history in the chromeperf dashboard).
+ // Sync with changes in Stream::LogMetrics (see TODOs there).
+ std::unique_ptr<VideoCodecStats> stats = RunEncodeDecodeTest(
+ CodecNameToCodecImpl(absl::GetFlag(FLAGS_encoder)),
+ kRawVideos.at(absl::GetFlag(FLAGS_video_name)), frames_settings);
+ ASSERT_NE(nullptr, stats);
+
+ // Log unsliced metrics.
+ VideoCodecStats::Stream stream = stats->Aggregate(Filter{});
+ stream.LogMetrics(GetGlobalMetricsLogger(), TestName(), /*prefix=*/"",
+ /*metadata=*/{});
+
+ // Log metrics sliced on spatial and temporal layer.
+ ScalabilityMode scalability_mode =
+ *ScalabilityModeFromString(absl::GetFlag(FLAGS_scalability_mode));
+ int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode);
+ int num_temporal_layers =
+ ScalabilityModeToNumTemporalLayers(scalability_mode);
+ for (int sidx = 0; sidx < num_spatial_layers; ++sidx) {
+ for (int tidx = 0; tidx < num_temporal_layers; ++tidx) {
+ std::string metric_name_prefix =
+ (rtc::StringBuilder() << "s" << sidx << "t" << tidx << "_").str();
+ stream = stats->Aggregate(
+ {.layer_id = {{.spatial_idx = sidx, .temporal_idx = tidx}}});
+ stream.LogMetrics(GetGlobalMetricsLogger(), TestName(),
+ metric_name_prefix,
+ /*metadata=*/{});
+ }
+ }
+
+ if (absl::GetFlag(FLAGS_write_csv)) {
+ stats->LogMetrics(
+ (rtc::StringBuilder() << TestOutputPath() << ".csv").str(),
+ stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/
+ {{"test_name", TestName()}});
+ }
+}
} // namespace test
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc
deleted file mode 100644
index f15b1b35f3..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc
+++ /dev/null
@@ -1,437 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_tester_impl.h"
-
-#include <map>
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "api/task_queue/default_task_queue_factory.h"
-#include "api/units/frequency.h"
-#include "api/units/time_delta.h"
-#include "api/units/timestamp.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_codec_type.h"
-#include "api/video/video_frame.h"
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-#include "modules/video_coding/utility/ivf_file_writer.h"
-#include "rtc_base/event.h"
-#include "rtc_base/time_utils.h"
-#include "system_wrappers/include/sleep.h"
-#include "test/testsupport/video_frame_writer.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using RawVideoSource = VideoCodecTester::RawVideoSource;
-using CodedVideoSource = VideoCodecTester::CodedVideoSource;
-using Decoder = VideoCodecTester::Decoder;
-using Encoder = VideoCodecTester::Encoder;
-using EncoderSettings = VideoCodecTester::EncoderSettings;
-using DecoderSettings = VideoCodecTester::DecoderSettings;
-using PacingSettings = VideoCodecTester::PacingSettings;
-using PacingMode = PacingSettings::PacingMode;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-// A thread-safe wrapper for video source to be shared with the quality analyzer
-// that reads reference frames from a separate thread.
-class SyncRawVideoSource : public VideoCodecAnalyzer::ReferenceVideoSource {
- public:
- explicit SyncRawVideoSource(RawVideoSource* video_source)
- : video_source_(video_source) {}
-
- absl::optional<VideoFrame> PullFrame() {
- MutexLock lock(&mutex_);
- return video_source_->PullFrame();
- }
-
- VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override {
- MutexLock lock(&mutex_);
- return video_source_->GetFrame(timestamp_rtp, resolution);
- }
-
- protected:
- RawVideoSource* const video_source_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-// Pacer calculates delay necessary to keep frame encode or decode call spaced
-// from the previous calls by the pacing time. `Delay` is expected to be called
-// as close as possible to posting frame encode or decode task. This class is
-// not thread safe.
-class Pacer {
- public:
- explicit Pacer(PacingSettings settings)
- : settings_(settings), delay_(TimeDelta::Zero()) {}
- Timestamp Schedule(Timestamp timestamp) {
- Timestamp now = Timestamp::Micros(rtc::TimeMicros());
- if (settings_.mode == PacingMode::kNoPacing) {
- return now;
- }
-
- Timestamp scheduled = now;
- if (prev_scheduled_) {
- scheduled = *prev_scheduled_ + PacingTime(timestamp);
- if (scheduled < now) {
- scheduled = now;
- }
- }
-
- prev_timestamp_ = timestamp;
- prev_scheduled_ = scheduled;
- return scheduled;
- }
-
- private:
- TimeDelta PacingTime(Timestamp timestamp) {
- if (settings_.mode == PacingMode::kRealTime) {
- return timestamp - *prev_timestamp_;
- }
- RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode);
- return 1 / settings_.constant_rate;
- }
-
- PacingSettings settings_;
- absl::optional<Timestamp> prev_timestamp_;
- absl::optional<Timestamp> prev_scheduled_;
- TimeDelta delay_;
-};
-
-// Task queue that keeps the number of queued tasks below a certain limit. If
-// the limit is reached, posting of a next task is blocked until execution of a
-// previously posted task starts. This class is not thread-safe.
-class LimitedTaskQueue {
- public:
- // The codec tester reads frames from video source in the main thread.
- // Encoding and decoding are done in separate threads. If encoding or
- // decoding is slow, the reading may go far ahead and may buffer too many
- // frames in memory. To prevent this we limit the encoding/decoding queue
- // size. When the queue is full, the main thread and, hence, reading frames
- // from video source is blocked until a previously posted encoding/decoding
- // task starts.
- static constexpr int kMaxTaskQueueSize = 3;
-
- LimitedTaskQueue() : queue_size_(0) {}
-
- void PostScheduledTask(absl::AnyInvocable<void() &&> task, Timestamp start) {
- ++queue_size_;
- task_queue_.PostTask([this, task = std::move(task), start]() mutable {
- int wait_ms = static_cast<int>(start.ms() - rtc::TimeMillis());
- if (wait_ms > 0) {
- SleepMs(wait_ms);
- }
-
- std::move(task)();
- --queue_size_;
- task_executed_.Set();
- });
-
- task_executed_.Reset();
- if (queue_size_ > kMaxTaskQueueSize) {
- task_executed_.Wait(rtc::Event::kForever);
- }
- RTC_CHECK(queue_size_ <= kMaxTaskQueueSize);
- }
-
- void WaitForPreviouslyPostedTasks() {
- task_queue_.SendTask([] {});
- }
-
- TaskQueueForTest task_queue_;
- std::atomic_int queue_size_;
- rtc::Event task_executed_;
-};
-
-class TesterY4mWriter {
- public:
- explicit TesterY4mWriter(absl::string_view base_path)
- : base_path_(base_path) {}
-
- ~TesterY4mWriter() {
- task_queue_.SendTask([] {});
- }
-
- void Write(const VideoFrame& frame, int spatial_idx) {
- task_queue_.PostTask([this, frame, spatial_idx] {
- if (y4m_writers_.find(spatial_idx) == y4m_writers_.end()) {
- std::string file_path =
- base_path_ + "_s" + std::to_string(spatial_idx) + ".y4m";
-
- Y4mVideoFrameWriterImpl* y4m_writer = new Y4mVideoFrameWriterImpl(
- file_path, frame.width(), frame.height(), /*fps=*/30);
- RTC_CHECK(y4m_writer);
-
- y4m_writers_[spatial_idx] =
- std::unique_ptr<VideoFrameWriter>(y4m_writer);
- }
-
- y4m_writers_.at(spatial_idx)->WriteFrame(frame);
- });
- }
-
- protected:
- std::string base_path_;
- std::map<int, std::unique_ptr<VideoFrameWriter>> y4m_writers_;
- TaskQueueForTest task_queue_;
-};
-
-class TesterIvfWriter {
- public:
- explicit TesterIvfWriter(absl::string_view base_path)
- : base_path_(base_path) {}
-
- ~TesterIvfWriter() {
- task_queue_.SendTask([] {});
- }
-
- void Write(const EncodedImage& encoded_frame) {
- task_queue_.PostTask([this, encoded_frame] {
- int spatial_idx = encoded_frame.SpatialIndex().value_or(0);
- if (ivf_file_writers_.find(spatial_idx) == ivf_file_writers_.end()) {
- std::string ivf_path =
- base_path_ + "_s" + std::to_string(spatial_idx) + ".ivf";
-
- FileWrapper ivf_file = FileWrapper::OpenWriteOnly(ivf_path);
- RTC_CHECK(ivf_file.is_open());
-
- std::unique_ptr<IvfFileWriter> ivf_writer =
- IvfFileWriter::Wrap(std::move(ivf_file), /*byte_limit=*/0);
- RTC_CHECK(ivf_writer);
-
- ivf_file_writers_[spatial_idx] = std::move(ivf_writer);
- }
-
- // To play: ffplay -vcodec vp8|vp9|av1|hevc|h264 filename
- ivf_file_writers_.at(spatial_idx)
- ->WriteFrame(encoded_frame, VideoCodecType::kVideoCodecGeneric);
- });
- }
-
- protected:
- std::string base_path_;
- std::map<int, std::unique_ptr<IvfFileWriter>> ivf_file_writers_;
- TaskQueueForTest task_queue_;
-};
-
-class TesterDecoder {
- public:
- TesterDecoder(Decoder* decoder,
- VideoCodecAnalyzer* analyzer,
- const DecoderSettings& settings)
- : decoder_(decoder),
- analyzer_(analyzer),
- settings_(settings),
- pacer_(settings.pacing) {
- RTC_CHECK(analyzer_) << "Analyzer must be provided";
-
- if (settings.decoder_input_base_path) {
- input_writer_ =
- std::make_unique<TesterIvfWriter>(*settings.decoder_input_base_path);
- }
-
- if (settings.decoder_output_base_path) {
- output_writer_ =
- std::make_unique<TesterY4mWriter>(*settings.decoder_output_base_path);
- }
- }
-
- void Initialize() {
- task_queue_.PostScheduledTask([this] { decoder_->Initialize(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- void Decode(const EncodedImage& input_frame) {
- Timestamp timestamp =
- Timestamp::Micros((input_frame.RtpTimestamp() / k90kHz).us());
-
- task_queue_.PostScheduledTask(
- [this, input_frame] {
- analyzer_->StartDecode(input_frame);
-
- decoder_->Decode(
- input_frame,
- [this, spatial_idx = input_frame.SpatialIndex().value_or(0)](
- const VideoFrame& output_frame) {
- analyzer_->FinishDecode(output_frame, spatial_idx);
-
- if (output_writer_) {
- output_writer_->Write(output_frame, spatial_idx);
- }
- });
-
- if (input_writer_) {
- input_writer_->Write(input_frame);
- }
- },
- pacer_.Schedule(timestamp));
- }
-
- void Flush() {
- task_queue_.PostScheduledTask([this] { decoder_->Flush(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- protected:
- Decoder* const decoder_;
- VideoCodecAnalyzer* const analyzer_;
- const DecoderSettings& settings_;
- Pacer pacer_;
- LimitedTaskQueue task_queue_;
- std::unique_ptr<TesterIvfWriter> input_writer_;
- std::unique_ptr<TesterY4mWriter> output_writer_;
-};
-
-class TesterEncoder {
- public:
- TesterEncoder(Encoder* encoder,
- TesterDecoder* decoder,
- VideoCodecAnalyzer* analyzer,
- const EncoderSettings& settings)
- : encoder_(encoder),
- decoder_(decoder),
- analyzer_(analyzer),
- settings_(settings),
- pacer_(settings.pacing) {
- RTC_CHECK(analyzer_) << "Analyzer must be provided";
- if (settings.encoder_input_base_path) {
- input_writer_ =
- std::make_unique<TesterY4mWriter>(*settings.encoder_input_base_path);
- }
-
- if (settings.encoder_output_base_path) {
- output_writer_ =
- std::make_unique<TesterIvfWriter>(*settings.encoder_output_base_path);
- }
- }
-
- void Initialize() {
- task_queue_.PostScheduledTask([this] { encoder_->Initialize(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- void Encode(const VideoFrame& input_frame) {
- Timestamp timestamp =
- Timestamp::Micros((input_frame.timestamp() / k90kHz).us());
-
- task_queue_.PostScheduledTask(
- [this, input_frame] {
- analyzer_->StartEncode(input_frame);
- encoder_->Encode(input_frame,
- [this](const EncodedImage& encoded_frame) {
- analyzer_->FinishEncode(encoded_frame);
-
- if (decoder_ != nullptr) {
- decoder_->Decode(encoded_frame);
- }
-
- if (output_writer_ != nullptr) {
- output_writer_->Write(encoded_frame);
- }
- });
-
- if (input_writer_) {
- input_writer_->Write(input_frame, /*spatial_idx=*/0);
- }
- },
- pacer_.Schedule(timestamp));
- }
-
- void Flush() {
- task_queue_.PostScheduledTask([this] { encoder_->Flush(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- protected:
- Encoder* const encoder_;
- TesterDecoder* const decoder_;
- VideoCodecAnalyzer* const analyzer_;
- const EncoderSettings& settings_;
- std::unique_ptr<TesterY4mWriter> input_writer_;
- std::unique_ptr<TesterIvfWriter> output_writer_;
- Pacer pacer_;
- LimitedTaskQueue task_queue_;
-};
-
-} // namespace
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunDecodeTest(
- CodedVideoSource* video_source,
- Decoder* decoder,
- const DecoderSettings& decoder_settings) {
- VideoCodecAnalyzer perf_analyzer;
- TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings);
-
- tester_decoder.Initialize();
-
- while (auto frame = video_source->PullFrame()) {
- tester_decoder.Decode(*frame);
- }
-
- tester_decoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunEncodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- const EncoderSettings& encoder_settings) {
- SyncRawVideoSource sync_source(video_source);
- VideoCodecAnalyzer perf_analyzer;
- TesterEncoder tester_encoder(encoder, /*decoder=*/nullptr, &perf_analyzer,
- encoder_settings);
-
- tester_encoder.Initialize();
-
- while (auto frame = sync_source.PullFrame()) {
- tester_encoder.Encode(*frame);
- }
-
- tester_encoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunEncodeDecodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- Decoder* decoder,
- const EncoderSettings& encoder_settings,
- const DecoderSettings& decoder_settings) {
- SyncRawVideoSource sync_source(video_source);
- VideoCodecAnalyzer perf_analyzer(&sync_source);
- TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings);
- TesterEncoder tester_encoder(encoder, &tester_decoder, &perf_analyzer,
- encoder_settings);
-
- tester_encoder.Initialize();
- tester_decoder.Initialize();
-
- while (auto frame = sync_source.PullFrame()) {
- tester_encoder.Encode(*frame);
- }
-
- tester_encoder.Flush();
- tester_decoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-} // namespace test
-} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h
deleted file mode 100644
index 32191b5a98..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
-
-#include <memory>
-
-#include "api/test/video_codec_tester.h"
-
-namespace webrtc {
-namespace test {
-
-// A stateless implementation of `VideoCodecTester`. This class is thread safe.
-class VideoCodecTesterImpl : public VideoCodecTester {
- public:
- std::unique_ptr<VideoCodecStats> RunDecodeTest(
- CodedVideoSource* video_source,
- Decoder* decoder,
- const DecoderSettings& decoder_settings) override;
-
- std::unique_ptr<VideoCodecStats> RunEncodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- const EncoderSettings& encoder_settings) override;
-
- std::unique_ptr<VideoCodecStats> RunEncodeDecodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- Decoder* decoder,
- const EncoderSettings& encoder_settings,
- const DecoderSettings& decoder_settings) override;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
deleted file mode 100644
index a8c118ef20..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_tester_impl.h"
-
-#include <memory>
-#include <tuple>
-#include <utility>
-#include <vector>
-
-#include "api/units/frequency.h"
-#include "api/units/time_delta.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_frame.h"
-#include "rtc_base/fake_clock.h"
-#include "rtc_base/gunit.h"
-#include "rtc_base/task_queue_for_test.h"
-#include "rtc_base/time_utils.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::_;
-using ::testing::Invoke;
-using ::testing::InvokeWithoutArgs;
-using ::testing::Return;
-
-using Decoder = VideoCodecTester::Decoder;
-using Encoder = VideoCodecTester::Encoder;
-using CodedVideoSource = VideoCodecTester::CodedVideoSource;
-using RawVideoSource = VideoCodecTester::RawVideoSource;
-using DecoderSettings = VideoCodecTester::DecoderSettings;
-using EncoderSettings = VideoCodecTester::EncoderSettings;
-using PacingSettings = VideoCodecTester::PacingSettings;
-using PacingMode = PacingSettings::PacingMode;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-struct PacingTestParams {
- PacingSettings pacing_settings;
- Frequency framerate;
- int num_frames;
- std::vector<int> expected_delta_ms;
-};
-
-VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) {
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(2, 2));
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
-}
-
-EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) {
- EncodedImage encoded_image;
- encoded_image.SetRtpTimestamp(timestamp_rtp);
- return encoded_image;
-}
-
-class MockRawVideoSource : public RawVideoSource {
- public:
- MockRawVideoSource(int num_frames, Frequency framerate)
- : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {}
-
- absl::optional<VideoFrame> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt;
- }
- uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_;
- ++frame_num_;
- return CreateVideoFrame(timestamp_rtp);
- }
-
- MOCK_METHOD(VideoFrame,
- GetFrame,
- (uint32_t timestamp_rtp, Resolution),
- (override));
-
- private:
- int num_frames_;
- int frame_num_;
- Frequency framerate_;
-};
-
-class MockCodedVideoSource : public CodedVideoSource {
- public:
- MockCodedVideoSource(int num_frames, Frequency framerate)
- : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {}
-
- absl::optional<EncodedImage> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt;
- }
- uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_;
- ++frame_num_;
- return CreateEncodedImage(timestamp_rtp);
- }
-
- private:
- int num_frames_;
- int frame_num_;
- Frequency framerate_;
-};
-
-class MockDecoder : public Decoder {
- public:
- MOCK_METHOD(void, Initialize, (), (override));
- MOCK_METHOD(void,
- Decode,
- (const EncodedImage& frame, DecodeCallback callback),
- (override));
- MOCK_METHOD(void, Flush, (), (override));
-};
-
-class MockEncoder : public Encoder {
- public:
- MOCK_METHOD(void, Initialize, (), (override));
- MOCK_METHOD(void,
- Encode,
- (const VideoFrame& frame, EncodeCallback callback),
- (override));
- MOCK_METHOD(void, Flush, (), (override));
-};
-
-} // namespace
-
-class VideoCodecTesterImplPacingTest
- : public ::testing::TestWithParam<PacingTestParams> {
- public:
- VideoCodecTesterImplPacingTest() : test_params_(GetParam()) {}
-
- protected:
- PacingTestParams test_params_;
-};
-
-TEST_P(VideoCodecTesterImplPacingTest, PaceEncode) {
- MockRawVideoSource video_source(test_params_.num_frames,
- test_params_.framerate);
- MockEncoder encoder;
- EncoderSettings encoder_settings;
- encoder_settings.pacing = test_params_.pacing_settings;
-
- VideoCodecTesterImpl tester;
- auto fs =
- tester.RunEncodeTest(&video_source, &encoder, encoder_settings)->Slice();
- ASSERT_EQ(static_cast<int>(fs.size()), test_params_.num_frames);
-
- for (size_t i = 1; i < fs.size(); ++i) {
- int delta_ms = (fs[i].encode_start - fs[i - 1].encode_start).ms();
- EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 10);
- }
-}
-
-TEST_P(VideoCodecTesterImplPacingTest, PaceDecode) {
- MockCodedVideoSource video_source(test_params_.num_frames,
- test_params_.framerate);
- MockDecoder decoder;
- DecoderSettings decoder_settings;
- decoder_settings.pacing = test_params_.pacing_settings;
-
- VideoCodecTesterImpl tester;
- auto fs =
- tester.RunDecodeTest(&video_source, &decoder, decoder_settings)->Slice();
- ASSERT_EQ(static_cast<int>(fs.size()), test_params_.num_frames);
-
- for (size_t i = 1; i < fs.size(); ++i) {
- int delta_ms = (fs[i].decode_start - fs[i - 1].decode_start).ms();
- EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 20);
- }
-}
-
-INSTANTIATE_TEST_SUITE_P(
- DISABLED_All,
- VideoCodecTesterImplPacingTest,
- ::testing::ValuesIn(
- {// No pacing.
- PacingTestParams({.pacing_settings = {.mode = PacingMode::kNoPacing},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {0, 0}}),
- // Real-time pacing.
- PacingTestParams({.pacing_settings = {.mode = PacingMode::kRealTime},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {100, 100}}),
- // Pace with specified constant rate.
- PacingTestParams(
- {.pacing_settings = {.mode = PacingMode::kConstantRate,
- .constant_rate = Frequency::Hertz(20)},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {50, 50}})}));
-} // namespace test
-} // namespace webrtc
diff --git a/third_party/libwebrtc/modules/video_coding/encoded_frame_gn/moz.build b/third_party/libwebrtc/modules/video_coding/encoded_frame_gn/moz.build
index 9b8e33b7d5..31e83f9c31 100644
--- a/third_party/libwebrtc/modules/video_coding/encoded_frame_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/encoded_frame_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build
index 487fc5b4d6..1ad9c574ad 100644
--- a/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/frame_helpers_gn/moz.build b/third_party/libwebrtc/modules/video_coding/frame_helpers_gn/moz.build
index dd901a5371..ccac90f50d 100644
--- a/third_party/libwebrtc/modules/video_coding/frame_helpers_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/frame_helpers_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/generic_decoder.cc b/third_party/libwebrtc/modules/video_coding/generic_decoder.cc
index fc356e7a44..00585abbc9 100644
--- a/third_party/libwebrtc/modules/video_coding/generic_decoder.cc
+++ b/third_party/libwebrtc/modules/video_coding/generic_decoder.cc
@@ -329,18 +329,7 @@ int32_t VCMGenericDecoder::Decode(const EncodedImage& frame,
}
_callback->OnDecoderInfoChanged(std::move(decoder_info));
}
- if (ret < WEBRTC_VIDEO_CODEC_OK) {
- const absl::optional<uint32_t> ssrc =
- !frame_info.packet_infos.empty()
- ? absl::make_optional(frame_info.packet_infos[0].ssrc())
- : absl::nullopt;
- RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp "
- << frame.RtpTimestamp() << ", ssrc "
- << (ssrc ? rtc::ToString(*ssrc) : "<not set>")
- << ", error code: " << ret;
- _callback->ClearTimestampMap();
- } else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) {
- // No output.
+ if (ret < WEBRTC_VIDEO_CODEC_OK || ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) {
_callback->ClearTimestampMap();
}
return ret;
diff --git a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
index c6522fcc6b..987e1b623e 100644
--- a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
+++ b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h
@@ -50,7 +50,9 @@ struct CodecSpecificInfoVP8 {
size_t updatedBuffers[kBuffersCount];
size_t updatedBuffersCount;
};
-static_assert(std::is_pod<CodecSpecificInfoVP8>::value, "");
+static_assert(std::is_trivial_v<CodecSpecificInfoVP8> &&
+ std::is_standard_layout_v<CodecSpecificInfoVP8>,
+ "");
// Hack alert - the code assumes that this struct is memset when constructed.
struct CodecSpecificInfoVP9 {
@@ -79,7 +81,9 @@ struct CodecSpecificInfoVP9 {
uint8_t num_ref_pics;
uint8_t p_diff[kMaxVp9RefPics];
};
-static_assert(std::is_pod<CodecSpecificInfoVP9>::value, "");
+static_assert(std::is_trivial_v<CodecSpecificInfoVP9> &&
+ std::is_standard_layout_v<CodecSpecificInfoVP9>,
+ "");
// Hack alert - the code assumes that this struct is memset when constructed.
struct CodecSpecificInfoH264 {
@@ -88,14 +92,18 @@ struct CodecSpecificInfoH264 {
bool base_layer_sync;
bool idr_frame;
};
-static_assert(std::is_pod<CodecSpecificInfoH264>::value, "");
+static_assert(std::is_trivial_v<CodecSpecificInfoH264> &&
+ std::is_standard_layout_v<CodecSpecificInfoH264>,
+ "");
union CodecSpecificInfoUnion {
CodecSpecificInfoVP8 VP8;
CodecSpecificInfoVP9 VP9;
CodecSpecificInfoH264 H264;
};
-static_assert(std::is_pod<CodecSpecificInfoUnion>::value, "");
+static_assert(std::is_trivial_v<CodecSpecificInfoUnion> &&
+ std::is_standard_layout_v<CodecSpecificInfoUnion>,
+ "");
// Note: if any pointers are added to this struct or its sub-structs, it
// must be fitted with a copy-constructor. This is because it is copied
diff --git a/third_party/libwebrtc/modules/video_coding/nack_requester_gn/moz.build b/third_party/libwebrtc/modules/video_coding/nack_requester_gn/moz.build
index 0f6654f1ab..d50ed75a00 100644
--- a/third_party/libwebrtc/modules/video_coding/nack_requester_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/nack_requester_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/packet_buffer_gn/moz.build b/third_party/libwebrtc/modules/video_coding/packet_buffer_gn/moz.build
index f3f85aacaa..2c161989c1 100644
--- a/third_party/libwebrtc/modules/video_coding/packet_buffer_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/packet_buffer_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build
index 8a1dfd6377..80eb00a991 100644
--- a/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build
index a3ea8b3495..931dfe8d89 100644
--- a/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build
@@ -202,7 +202,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -212,10 +211,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/svc/scalable_video_controller_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/scalable_video_controller_gn/moz.build
index a285154a79..18aa68e696 100644
--- a/third_party/libwebrtc/modules/video_coding/svc/scalable_video_controller_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/svc/scalable_video_controller_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build
index 412f719d18..bbb5a75959 100644
--- a/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/decode_time_percentile_filter_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/decode_time_percentile_filter_gn/moz.build
index 36867642c7..2347b0937c 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/decode_time_percentile_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/decode_time_percentile_filter_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter_gn/moz.build
index caf0efc165..274023c6e7 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/frame_delay_variation_kalman_filter_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/inter_frame_delay_variation_calculator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/inter_frame_delay_variation_calculator_gn/moz.build
index 8c6e826a4a..d4ec330ed1 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/inter_frame_delay_variation_calculator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/inter_frame_delay_variation_calculator_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/jitter_estimator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/jitter_estimator_gn/moz.build
index c7ca3c7fd8..e540f00f8c 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/jitter_estimator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/jitter_estimator_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/rtt_filter_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/rtt_filter_gn/moz.build
index f3993a17b1..18a30a6ede 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/rtt_filter_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/rtt_filter_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/timestamp_extrapolator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/timestamp_extrapolator_gn/moz.build
index ad8a6874e4..4c2a6eed62 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/timestamp_extrapolator_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/timestamp_extrapolator_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/timing/timing_module_gn/moz.build b/third_party/libwebrtc/modules/video_coding/timing/timing_module_gn/moz.build
index 60cc81a229..76c4cfe664 100644
--- a/third_party/libwebrtc/modules/video_coding/timing/timing_module_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/timing/timing_module_gn/moz.build
@@ -199,7 +199,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -209,10 +208,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build b/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build
index b14bef2dec..141def9090 100644
--- a/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build
@@ -196,7 +196,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -206,10 +205,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/video_coding_gn/moz.build b/third_party/libwebrtc/modules/video_coding/video_coding_gn/moz.build
index 5af51f1238..923ac7785a 100644
--- a/third_party/libwebrtc/modules/video_coding/video_coding_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/video_coding_gn/moz.build
@@ -214,7 +214,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -224,10 +223,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/video_coding_utility_gn/moz.build b/third_party/libwebrtc/modules/video_coding/video_coding_utility_gn/moz.build
index d42eb284cd..bc1510e0ba 100644
--- a/third_party/libwebrtc/modules/video_coding/video_coding_utility_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/video_coding_utility_gn/moz.build
@@ -211,7 +211,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -221,10 +220,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_libvpx_interface_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_libvpx_interface_gn/moz.build
index 81c9b9d404..8cb4b64625 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_libvpx_interface_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_libvpx_interface_gn/moz.build
@@ -191,7 +191,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -201,10 +200,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_gn/moz.build
index 82a4d24e97..21d5eeee9f 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_gn/moz.build
@@ -206,7 +206,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -216,10 +215,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_scalability_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_scalability_gn/moz.build
index 6799224dff..92fd7cf630 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_scalability_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_scalability_gn/moz.build
@@ -188,7 +188,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -198,10 +197,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_temporal_layers_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_temporal_layers_gn/moz.build
index 2423950ba5..caf91a5d2c 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_vp8_temporal_layers_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_vp8_temporal_layers_gn/moz.build
@@ -205,7 +205,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -215,10 +214,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_vp9_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_vp9_gn/moz.build
index 5bb64f3412..707d563559 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_vp9_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_vp9_gn/moz.build
@@ -208,7 +208,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -218,10 +217,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/webrtc_vp9_helpers_gn/moz.build b/third_party/libwebrtc/modules/video_coding/webrtc_vp9_helpers_gn/moz.build
index 6f1575870e..883e5c70b2 100644
--- a/third_party/libwebrtc/modules/video_coding/webrtc_vp9_helpers_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/webrtc_vp9_helpers_gn/moz.build
@@ -200,7 +200,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -210,10 +209,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True