Diffstat:
-rw-r--r--  third_party/libwebrtc/api/video/BUILD.gn | 421
-rw-r--r--  third_party/libwebrtc/api/video/DEPS | 77
-rw-r--r--  third_party/libwebrtc/api/video/OWNERS | 5
-rw-r--r--  third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.cc | 50
-rw-r--r--  third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.h | 25
-rw-r--r--  third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory_gn/moz.build | 233
-rw-r--r--  third_party/libwebrtc/api/video/color_space.cc | 187
-rw-r--r--  third_party/libwebrtc/api/video/color_space.h | 178
-rw-r--r--  third_party/libwebrtc/api/video/encoded_frame.cc | 33
-rw-r--r--  third_party/libwebrtc/api/video/encoded_frame.h | 74
-rw-r--r--  third_party/libwebrtc/api/video/encoded_frame_gn/moz.build | 232
-rw-r--r--  third_party/libwebrtc/api/video/encoded_image.cc | 99
-rw-r--r--  third_party/libwebrtc/api/video/encoded_image.h | 226
-rw-r--r--  third_party/libwebrtc/api/video/encoded_image_gn/moz.build | 225
-rw-r--r--  third_party/libwebrtc/api/video/frame_buffer.cc | 332
-rw-r--r--  third_party/libwebrtc/api/video/frame_buffer.h | 110
-rw-r--r--  third_party/libwebrtc/api/video/frame_buffer_gn/moz.build | 232
-rw-r--r--  third_party/libwebrtc/api/video/frame_buffer_unittest.cc | 393
-rw-r--r--  third_party/libwebrtc/api/video/hdr_metadata.cc | 21
-rw-r--r--  third_party/libwebrtc/api/video/hdr_metadata.h | 105
-rw-r--r--  third_party/libwebrtc/api/video/i010_buffer.cc | 213
-rw-r--r--  third_party/libwebrtc/api/video/i010_buffer.h | 84
-rw-r--r--  third_party/libwebrtc/api/video/i210_buffer.cc | 211
-rw-r--r--  third_party/libwebrtc/api/video/i210_buffer.h | 84
-rw-r--r--  third_party/libwebrtc/api/video/i410_buffer.cc | 221
-rw-r--r--  third_party/libwebrtc/api/video/i410_buffer.h | 104
-rw-r--r--  third_party/libwebrtc/api/video/i420_buffer.cc | 232
-rw-r--r--  third_party/libwebrtc/api/video/i420_buffer.h | 118
-rw-r--r--  third_party/libwebrtc/api/video/i422_buffer.cc | 237
-rw-r--r--  third_party/libwebrtc/api/video/i422_buffer.h | 114
-rw-r--r--  third_party/libwebrtc/api/video/i444_buffer.cc | 211
-rw-r--r--  third_party/libwebrtc/api/video/i444_buffer.h | 104
-rw-r--r--  third_party/libwebrtc/api/video/nv12_buffer.cc | 155
-rw-r--r--  third_party/libwebrtc/api/video/nv12_buffer.h | 85
-rw-r--r--  third_party/libwebrtc/api/video/recordable_encoded_frame.h | 61
-rw-r--r--  third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build | 209
-rw-r--r--  third_party/libwebrtc/api/video/render_resolution.h | 46
-rw-r--r--  third_party/libwebrtc/api/video/render_resolution_gn/moz.build | 201
-rw-r--r--  third_party/libwebrtc/api/video/resolution.h | 38
-rw-r--r--  third_party/libwebrtc/api/video/resolution_gn/moz.build | 201
-rw-r--r--  third_party/libwebrtc/api/video/rtp_video_frame_assembler.cc | 340
-rw-r--r--  third_party/libwebrtc/api/video/rtp_video_frame_assembler.h | 76
-rw-r--r--  third_party/libwebrtc/api/video/rtp_video_frame_assembler_unittests.cc | 583
-rw-r--r--  third_party/libwebrtc/api/video/test/BUILD.gn | 56
-rw-r--r--  third_party/libwebrtc/api/video/test/color_space_unittest.cc | 74
-rw-r--r--  third_party/libwebrtc/api/video/test/i210_buffer_unittest.cc | 126
-rw-r--r--  third_party/libwebrtc/api/video/test/i410_buffer_unittest.cc | 120
-rw-r--r--  third_party/libwebrtc/api/video/test/i422_buffer_unittest.cc | 128
-rw-r--r--  third_party/libwebrtc/api/video/test/i444_buffer_unittest.cc | 112
-rw-r--r--  third_party/libwebrtc/api/video/test/mock_recordable_encoded_frame.h | 34
-rw-r--r--  third_party/libwebrtc/api/video/test/nv12_buffer_unittest.cc | 119
-rw-r--r--  third_party/libwebrtc/api/video/test/video_adaptation_counters_unittest.cc | 32
-rw-r--r--  third_party/libwebrtc/api/video/test/video_bitrate_allocation_unittest.cc | 64
-rw-r--r--  third_party/libwebrtc/api/video/test/video_frame_matchers.h | 34
-rw-r--r--  third_party/libwebrtc/api/video/video_adaptation_counters.cc | 42
-rw-r--r--  third_party/libwebrtc/api/video/video_adaptation_counters.h | 46
-rw-r--r--  third_party/libwebrtc/api/video/video_adaptation_gn/moz.build | 221
-rw-r--r--  third_party/libwebrtc/api/video/video_adaptation_reason.h | 20
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocation.cc | 185
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocation.h | 96
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocation_gn/moz.build | 221
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocator.cc | 54
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocator.h | 60
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocator_factory.h | 33
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build | 209
-rw-r--r--  third_party/libwebrtc/api/video/video_bitrate_allocator_gn/moz.build | 221
-rw-r--r--  third_party/libwebrtc/api/video/video_codec_constants.h | 24
-rw-r--r--  third_party/libwebrtc/api/video/video_codec_constants_gn/moz.build | 201
-rw-r--r--  third_party/libwebrtc/api/video/video_codec_type.h | 29
-rw-r--r--  third_party/libwebrtc/api/video/video_content_type.cc | 93
-rw-r--r--  third_party/libwebrtc/api/video/video_content_type.h | 39
-rw-r--r--  third_party/libwebrtc/api/video/video_frame.cc | 317
-rw-r--r--  third_party/libwebrtc/api/video/video_frame.h | 302
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_buffer.cc | 242
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_buffer.h | 325
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_gn/moz.build | 236
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build | 232
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_metadata.cc | 139
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_metadata.h | 112
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build | 225
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_type.h | 26
-rw-r--r--  third_party/libwebrtc/api/video/video_frame_type_gn/moz.build | 201
-rw-r--r--  third_party/libwebrtc/api/video/video_layers_allocation.h | 77
-rw-r--r--  third_party/libwebrtc/api/video/video_layers_allocation_gn/moz.build | 205
-rw-r--r--  third_party/libwebrtc/api/video/video_rotation.h | 26
-rw-r--r--  third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build | 228
-rw-r--r--  third_party/libwebrtc/api/video/video_sink_interface.h | 39
-rw-r--r--  third_party/libwebrtc/api/video/video_source_interface.cc | 19
-rw-r--r--  third_party/libwebrtc/api/video/video_source_interface.h | 130
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_decoder.h | 57
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_decoder_create.cc | 32
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_decoder_create.h | 37
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_decoder_create_unittest.cc | 46
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build | 209
-rw-r--r--  third_party/libwebrtc/api/video/video_stream_encoder_settings.h | 60
-rw-r--r--  third_party/libwebrtc/api/video/video_timing.cc | 101
-rw-r--r--  third_party/libwebrtc/api/video/video_timing.h | 132
-rw-r--r--  third_party/libwebrtc/api/video_codecs/BUILD.gn | 314
-rw-r--r--  third_party/libwebrtc/api/video_codecs/OWNERS | 4
-rw-r--r--  third_party/libwebrtc/api/video_codecs/av1_profile.cc | 69
-rw-r--r--  third_party/libwebrtc/api/video_codecs/av1_profile.h | 57
-rw-r--r--  third_party/libwebrtc/api/video_codecs/bitstream_parser.h | 37
-rw-r--r--  third_party/libwebrtc/api/video_codecs/bitstream_parser_api_gn/moz.build | 205
-rw-r--r--  third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.cc | 23
-rw-r--r--  third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.h | 27
-rw-r--r--  third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.cc | 71
-rw-r--r--  third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.h | 28
-rw-r--r--  third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc | 256
-rw-r--r--  third_party/libwebrtc/api/video_codecs/h264_profile_level_id.h | 92
-rw-r--r--  third_party/libwebrtc/api/video_codecs/rtc_software_fallback_wrappers_gn/moz.build | 234
-rw-r--r--  third_party/libwebrtc/api/video_codecs/scalability_mode.cc | 91
-rw-r--r--  third_party/libwebrtc/api/video_codecs/scalability_mode.h | 111
-rw-r--r--  third_party/libwebrtc/api/video_codecs/scalability_mode_gn/moz.build | 221
-rw-r--r--  third_party/libwebrtc/api/video_codecs/scalability_mode_helper.cc | 37
-rw-r--r--  third_party/libwebrtc/api/video_codecs/scalability_mode_helper.h | 31
-rw-r--r--  third_party/libwebrtc/api/video_codecs/sdp_video_format.cc | 171
-rw-r--r--  third_party/libwebrtc/api/video_codecs/sdp_video_format.h | 75
-rw-r--r--  third_party/libwebrtc/api/video_codecs/simulcast_stream.cc | 37
-rw-r--r--  third_party/libwebrtc/api/video_codecs/simulcast_stream.h | 39
-rw-r--r--  third_party/libwebrtc/api/video_codecs/spatial_layer.cc | 25
-rw-r--r--  third_party/libwebrtc/api/video_codecs/spatial_layer.h | 32
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/BUILD.gn | 81
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc | 39
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/h264_profile_level_id_unittest.cc | 171
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/sdp_video_format_unittest.cc | 103
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/video_decoder_factory_template_tests.cc | 123
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc | 305
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/video_encoder_factory_template_tests.cc | 172
-rw-r--r--  third_party/libwebrtc/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc | 1055
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_codec.cc | 155
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_codec.h | 200
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build | 235
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder.cc | 65
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder.h | 136
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory.h | 69
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory_template.h | 95
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h | 34
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h | 33
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h | 32
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h | 44
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc | 284
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.h | 31
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder.cc | 345
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder.h | 426
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory.h | 127
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory_template.h | 135
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h | 43
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h | 46
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h | 36
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h | 50
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc | 519
-rw-r--r--  third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.h | 47
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_frame_buffer_controller.h | 192
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_frame_config.cc | 78
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_frame_config.h | 110
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.cc | 108
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.h | 77
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.cc | 59
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.h | 34
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory_gn/moz.build | 232
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp9_profile.cc | 73
-rw-r--r--  third_party/libwebrtc/api/video_codecs/vp9_profile.h | 54
-rw-r--r--  third_party/libwebrtc/api/video_track_source_constraints.h | 32
-rw-r--r--  third_party/libwebrtc/api/video_track_source_constraints_gn/moz.build | 201
-rw-r--r--  third_party/libwebrtc/api/video_track_source_proxy_factory.h | 29
165 files changed, 22436 insertions, 0 deletions
diff --git a/third_party/libwebrtc/api/video/BUILD.gn b/third_party/libwebrtc/api/video/BUILD.gn
new file mode 100644
index 0000000000..5d1aa2a1a3
--- /dev/null
+++ b/third_party/libwebrtc/api/video/BUILD.gn
@@ -0,0 +1,421 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+if (is_android) {
+ import("//build/config/android/config.gni")
+ import("//build/config/android/rules.gni")
+}
+
+rtc_library("video_rtp_headers") {
+ visibility = [ "*" ]
+ sources = [
+ "color_space.cc",
+ "color_space.h",
+ "hdr_metadata.cc",
+ "hdr_metadata.h",
+ "video_content_type.cc",
+ "video_content_type.h",
+ "video_rotation.h",
+ "video_timing.cc",
+ "video_timing.h",
+ ]
+
+ deps = [
+ "..:array_view",
+ "../../rtc_base:logging",
+ "../../rtc_base:safe_conversions",
+ "../../rtc_base:stringutils",
+ "../../rtc_base/system:rtc_export",
+ "../units:data_rate",
+ "../units:time_delta",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_frame") {
+ visibility = [ "*" ]
+ sources = [
+ "i420_buffer.cc",
+ "i420_buffer.h",
+ "i422_buffer.cc",
+ "i422_buffer.h",
+ "i444_buffer.cc",
+ "i444_buffer.h",
+ "nv12_buffer.cc",
+ "nv12_buffer.h",
+ "video_codec_type.h",
+ "video_frame.cc",
+ "video_frame.h",
+ "video_frame_buffer.cc",
+ "video_frame_buffer.h",
+ "video_sink_interface.h",
+ "video_source_interface.cc",
+ "video_source_interface.h",
+ ]
+
+ deps = [
+ ":video_rtp_headers",
+ "..:array_view",
+ "..:make_ref_counted",
+ "..:rtp_packet_info",
+ "..:scoped_refptr",
+ "..:video_track_source_constraints",
+ "../../rtc_base:checks",
+ "../../rtc_base:refcount",
+ "../../rtc_base:timeutils",
+ "../../rtc_base/memory:aligned_malloc",
+ "../../rtc_base/system:rtc_export",
+ "//third_party/libyuv",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+if (is_android) {
+ java_cpp_enum("video_frame_enums") {
+ sources = [ "video_frame_buffer.h" ]
+ }
+}
+
+rtc_library("video_frame_i010") {
+ visibility = [ "*" ]
+ sources = [
+ "i010_buffer.cc",
+ "i010_buffer.h",
+ "i210_buffer.cc",
+ "i210_buffer.h",
+ "i410_buffer.cc",
+ "i410_buffer.h",
+ ]
+ deps = [
+ ":video_frame",
+ ":video_rtp_headers",
+ "..:make_ref_counted",
+ "..:scoped_refptr",
+ "../../rtc_base:checks",
+ "../../rtc_base:refcount",
+ "../../rtc_base/memory:aligned_malloc",
+ "//third_party/libyuv",
+ ]
+}
+
+rtc_source_set("recordable_encoded_frame") {
+ visibility = [ "*" ]
+ sources = [ "recordable_encoded_frame.h" ]
+
+ deps = [
+ ":encoded_image",
+ ":video_frame",
+ ":video_rtp_headers",
+ "..:array_view",
+ "..:make_ref_counted",
+ "..:scoped_refptr",
+ "../units:timestamp",
+ ]
+}
+
+rtc_source_set("video_frame_type") {
+ visibility = [ "*" ]
+ sources = [ "video_frame_type.h" ]
+}
+
+rtc_source_set("render_resolution") {
+ visibility = [ "*" ]
+ public = [ "render_resolution.h" ]
+}
+
+rtc_source_set("resolution") {
+ visibility = [ "*" ]
+ public = [ "resolution.h" ]
+}
+
+rtc_library("encoded_image") {
+ visibility = [ "*" ]
+ sources = [
+ "encoded_image.cc",
+ "encoded_image.h",
+ ]
+ deps = [
+ ":video_codec_constants",
+ ":video_frame",
+ ":video_frame_type",
+ ":video_rtp_headers",
+ "..:refcountedbase",
+ "..:rtp_packet_info",
+ "..:scoped_refptr",
+ "../../rtc_base:checks",
+ "../../rtc_base:refcount",
+ "../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("encoded_frame") {
+ visibility = [ "*" ]
+ sources = [
+ "encoded_frame.cc",
+ "encoded_frame.h",
+ ]
+
+ deps = [
+ "../../modules/video_coding:encoded_frame",
+ "../units:timestamp",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("rtp_video_frame_assembler") {
+ visibility = [ "*" ]
+ sources = [
+ "rtp_video_frame_assembler.cc",
+ "rtp_video_frame_assembler.h",
+ ]
+
+ deps = [
+ ":encoded_frame",
+ "../../modules/rtp_rtcp:rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../modules/video_coding:packet_buffer",
+ "../../modules/video_coding:video_coding",
+ "../../rtc_base:logging",
+ "../../rtc_base:rtc_numerics",
+ ]
+
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("rtp_video_frame_assembler_unittests") {
+ testonly = true
+ sources = [ "rtp_video_frame_assembler_unittests.cc" ]
+
+ deps = [
+ ":rtp_video_frame_assembler",
+ "..:array_view",
+ "../../modules/rtp_rtcp:rtp_packetizer_av1_test_helper",
+ "../../modules/rtp_rtcp:rtp_rtcp",
+ "../../modules/rtp_rtcp:rtp_rtcp_format",
+ "../../test:test_support",
+ ]
+}
+
+rtc_source_set("video_codec_constants") {
+ visibility = [ "*" ]
+ sources = [ "video_codec_constants.h" ]
+ deps = []
+}
+
+rtc_library("video_bitrate_allocation") {
+ visibility = [ "*" ]
+ sources = [
+ "video_bitrate_allocation.cc",
+ "video_bitrate_allocation.h",
+ ]
+ deps = [
+ ":video_codec_constants",
+ "../../rtc_base:checks",
+ "../../rtc_base:safe_conversions",
+ "../../rtc_base:stringutils",
+ "../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("video_layers_allocation") {
+ visibility = [ "*" ]
+ sources = [ "video_layers_allocation.h" ]
+ deps = [ "../units:data_rate" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ]
+}
+
+rtc_library("video_bitrate_allocator") {
+ visibility = [ "*" ]
+ sources = [
+ "video_bitrate_allocator.cc",
+ "video_bitrate_allocator.h",
+ ]
+ deps = [
+ ":video_bitrate_allocation",
+ "../units:data_rate",
+ ]
+}
+
+rtc_source_set("video_bitrate_allocator_factory") {
+ visibility = [ "*" ]
+ sources = [ "video_bitrate_allocator_factory.h" ]
+ deps = [
+ ":video_bitrate_allocator",
+ "../video_codecs:video_codecs_api",
+ ]
+}
+
+rtc_source_set("video_stream_decoder") {
+ visibility = [ "*" ]
+ sources = [ "video_stream_decoder.h" ]
+
+ deps = [
+ ":encoded_frame",
+ ":video_frame",
+ ":video_rtp_headers",
+ "../task_queue",
+ "../units:time_delta",
+ "../video_codecs:video_codecs_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("video_stream_decoder_create") {
+ visibility = [ "*" ]
+ sources = [
+ "video_stream_decoder_create.cc",
+ "video_stream_decoder_create.h",
+ ]
+
+ deps = [
+ ":video_stream_decoder",
+ "../../api:field_trials_view",
+ "../../video:video_stream_decoder_impl",
+ "../task_queue",
+ "../video_codecs:video_codecs_api",
+ ]
+}
+
+rtc_library("video_adaptation") {
+ visibility = [ "*" ]
+ sources = [
+ "video_adaptation_counters.cc",
+ "video_adaptation_counters.h",
+ "video_adaptation_reason.h",
+ ]
+
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base:stringutils",
+ ]
+}
+
+rtc_source_set("video_stream_encoder") {
+ visibility = [ "*" ]
+ sources = [ "video_stream_encoder_settings.h" ]
+
+ deps = [
+ ":video_adaptation",
+ ":video_bitrate_allocation",
+ ":video_bitrate_allocator",
+ ":video_bitrate_allocator_factory",
+ ":video_codec_constants",
+ ":video_frame",
+ ":video_layers_allocation",
+ "..:rtp_parameters",
+ "..:scoped_refptr",
+ "../:fec_controller_api",
+ "../:rtp_parameters",
+ "../adaptation:resource_adaptation_api",
+ "../units:data_rate",
+ "../video_codecs:video_codecs_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("video_frame_metadata") {
+ visibility = [ "*" ]
+ sources = [
+ "video_frame_metadata.cc",
+ "video_frame_metadata.h",
+ ]
+ deps = [
+ ":video_frame",
+ ":video_frame_type",
+ ":video_rtp_headers",
+ "..:array_view",
+ "../../modules/video_coding:codec_globals_headers",
+ "../../rtc_base/system:rtc_export",
+ "../transport/rtp:dependency_descriptor",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ "//third_party/abseil-cpp/absl/types:variant",
+ ]
+}
+
+rtc_library("builtin_video_bitrate_allocator_factory") {
+ visibility = [ "*" ]
+ sources = [
+ "builtin_video_bitrate_allocator_factory.cc",
+ "builtin_video_bitrate_allocator_factory.h",
+ ]
+
+ deps = [
+ ":video_bitrate_allocation",
+ ":video_bitrate_allocator",
+ ":video_bitrate_allocator_factory",
+ "../../api:scoped_refptr",
+ "../../media:rtc_media_base",
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding/svc:svc_rate_allocator",
+ "../video_codecs:video_codecs_api",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ]
+}
+
+rtc_library("frame_buffer") {
+ visibility = [ "*" ]
+ sources = [
+ "frame_buffer.cc",
+ "frame_buffer.h",
+ ]
+ deps = [
+ "../../api:field_trials_view",
+ "../../api/units:timestamp",
+ "../../api/video:encoded_frame",
+ "../../modules/video_coding:video_coding_utility",
+ "../../rtc_base:logging",
+ "../../rtc_base:rtc_numerics",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("frame_buffer_unittest") {
+ testonly = true
+ sources = [ "frame_buffer_unittest.cc" ]
+
+ deps = [
+ ":frame_buffer",
+ "../../api/video:encoded_frame",
+ "../../test:fake_encoded_frame",
+ "../../test:field_trial",
+ "../../test:scoped_key_value_config",
+ "../../test:test_support",
+ ]
+}
+
+if (rtc_include_tests) {
+ rtc_library("video_unittests") {
+ testonly = true
+ sources = [ "video_stream_decoder_create_unittest.cc" ]
+ deps = [
+ ":video_frame_metadata",
+ ":video_frame_type",
+ ":video_stream_decoder_create",
+ "../../modules/rtp_rtcp:rtp_video_header",
+ "../../test:test_support",
+ "../task_queue:default_task_queue_factory",
+ "../video_codecs:builtin_video_decoder_factory",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/api/video/DEPS b/third_party/libwebrtc/api/video/DEPS
new file mode 100644
index 0000000000..c84299f943
--- /dev/null
+++ b/third_party/libwebrtc/api/video/DEPS
@@ -0,0 +1,77 @@
+specific_include_rules = {
+  # Until the new VideoStreamDecoder is implemented, the current decoding
+ # pipeline will be used, and therefore EncodedFrame needs to inherit
+ # VCMEncodedFrame.
+ "encoded_frame.h": [
+ "+modules/video_coding/encoded_frame.h",
+ ],
+
+ "encoded_image\.h" : [
+ "+rtc_base/ref_count.h",
+ ],
+
+ "i010_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "i210_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "i410_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "i420_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "i422_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "i444_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "nv12_buffer\.h": [
+ "+rtc_base/memory/aligned_malloc.h",
+ ],
+
+ "recordable_encoded_frame\.h": [
+ "+rtc_base/ref_count.h",
+ ],
+
+ "video_frame\.h": [
+ ],
+
+ "video_frame_buffer\.h": [
+ "+rtc_base/ref_count.h",
+ ],
+
+ "video_frame_metadata\.h": [
+ "+modules/video_coding/codecs/h264/include/h264_globals.h",
+ "+modules/video_coding/codecs/vp8/include/vp8_globals.h",
+ "+modules/video_coding/codecs/vp9/include/vp9_globals.h",
+ ],
+
+ "video_stream_decoder_create.cc": [
+ "+video/video_stream_decoder_impl.h",
+ ],
+
+ "video_stream_encoder_create.cc": [
+ "+video/video_stream_encoder.h",
+ ],
+
+ "rtp_video_frame_assembler.h": [
+ "+modules/rtp_rtcp/source/rtp_packet_received.h",
+ ],
+
+ "frame_buffer.h": [
+ "+modules/video_coding/utility/decoded_frames_history.h",
+ ],
+
+ "video_frame_matchers\.h": [
+ "+test/gmock.h",
+ ],
+}
diff --git a/third_party/libwebrtc/api/video/OWNERS b/third_party/libwebrtc/api/video/OWNERS
new file mode 100644
index 0000000000..49b62f3780
--- /dev/null
+++ b/third_party/libwebrtc/api/video/OWNERS
@@ -0,0 +1,5 @@
+brandtr@webrtc.org
+magjed@webrtc.org
+philipel@webrtc.org
+
+per-file video_timing.h=ilnik@webrtc.org
diff --git a/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.cc b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.cc
new file mode 100644
index 0000000000..4c24a0e75d
--- /dev/null
+++ b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.cc
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+
+#include <memory>
+
+#include "absl/base/macros.h"
+#include "api/video/video_bitrate_allocator.h"
+#include "api/video_codecs/video_codec.h"
+#include "modules/video_coding/svc/svc_rate_allocator.h"
+#include "modules/video_coding/utility/simulcast_rate_allocator.h"
+
+namespace webrtc {
+
+namespace {
+
+class BuiltinVideoBitrateAllocatorFactory
+ : public VideoBitrateAllocatorFactory {
+ public:
+ BuiltinVideoBitrateAllocatorFactory() = default;
+ ~BuiltinVideoBitrateAllocatorFactory() override = default;
+
+ std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
+ const VideoCodec& codec) override {
+ switch (codec.codecType) {
+ case kVideoCodecAV1:
+ case kVideoCodecVP9:
+ return std::make_unique<SvcRateAllocator>(codec);
+ default:
+ return std::make_unique<SimulcastRateAllocator>(codec);
+ }
+ }
+};
+
+} // namespace
+
+std::unique_ptr<VideoBitrateAllocatorFactory>
+CreateBuiltinVideoBitrateAllocatorFactory() {
+ return std::make_unique<BuiltinVideoBitrateAllocatorFactory>();
+}
+
+} // namespace webrtc
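
Usage sketch (illustrative only, not part of the patch): the factory above routes AV1 and VP9 to SvcRateAllocator and every other codec type to SimulcastRateAllocator. A minimal caller, assuming the api/video_codecs headers added elsewhere in this series, could look like the snippet below; a real VideoCodec would carry a full encoder configuration rather than just the codec type.

    #include <memory>

    #include "api/video/builtin_video_bitrate_allocator_factory.h"
    #include "api/video/video_bitrate_allocator.h"
    #include "api/video_codecs/video_codec.h"

    // Sketch: pick an allocator for a VP9 stream via the built-in factory.
    std::unique_ptr<webrtc::VideoBitrateAllocator> MakeVp9Allocator() {
      auto factory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
      webrtc::VideoCodec codec;                  // Minimal, hypothetical config.
      codec.codecType = webrtc::kVideoCodecVP9;  // AV1/VP9 -> SvcRateAllocator.
      return factory->CreateVideoBitrateAllocator(codec);
    }
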
diff --git a/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.h b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.h
new file mode 100644
index 0000000000..ac880a0863
--- /dev/null
+++ b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+#define API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+
+#include <memory>
+
+#include "api/video/video_bitrate_allocator_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoBitrateAllocatorFactory>
+CreateBuiltinVideoBitrateAllocatorFactory();
+
+} // namespace webrtc
+
+#endif // API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
diff --git a/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory_gn/moz.build b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory_gn/moz.build
new file mode 100644
index 0000000000..40823f2355
--- /dev/null
+++ b/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory_gn/moz.build
@@ -0,0 +1,233 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/builtin_video_bitrate_allocator_factory.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "GLESv2",
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("builtin_video_bitrate_allocator_factory_gn")
diff --git a/third_party/libwebrtc/api/video/color_space.cc b/third_party/libwebrtc/api/video/color_space.cc
new file mode 100644
index 0000000000..a0cd32edb2
--- /dev/null
+++ b/third_party/libwebrtc/api/video/color_space.cc
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/color_space.h"
+
+namespace webrtc {
+namespace {
+// Try to convert `enum_value` into the enum class T. `enum_bitmask` is created
+// by the function below. Returns true if conversion was successful, false
+// otherwise.
+template <typename T>
+bool SetFromUint8(uint8_t enum_value, uint64_t enum_bitmask, T* out) {
+ if ((enum_value < 64) && ((enum_bitmask >> enum_value) & 1)) {
+ *out = static_cast<T>(enum_value);
+ return true;
+ }
+ return false;
+}
+
+// This function serves as an assert for the constexpr function below. It's on
+// purpose not declared as constexpr so that it causes a build problem if enum
+// values of 64 or above are used. The bitmask and the code generating it would
+// have to be extended if the standard is updated to include enum values >= 64.
+int EnumMustBeLessThan64() {
+ return -1;
+}
+
+template <typename T, size_t N>
+constexpr int MakeMask(const int index, const int length, T (&values)[N]) {
+ return length > 1
+ ? (MakeMask(index, 1, values) +
+ MakeMask(index + 1, length - 1, values))
+ : (static_cast<uint8_t>(values[index]) < 64
+ ? (uint64_t{1} << static_cast<uint8_t>(values[index]))
+ : EnumMustBeLessThan64());
+}
+
+// Create a bitmask where each bit corresponds to one potential enum value.
+// `values` should be an array listing all possible enum values. The bit is set
+// to one if the corresponding enum exists. Only works for enums with values
+// less than 64.
+template <typename T, size_t N>
+constexpr uint64_t CreateEnumBitmask(T (&values)[N]) {
+ return MakeMask(0, N, values);
+}
+
+bool SetChromaSitingFromUint8(uint8_t enum_value,
+ ColorSpace::ChromaSiting* chroma_siting) {
+ constexpr ColorSpace::ChromaSiting kChromaSitings[] = {
+ ColorSpace::ChromaSiting::kUnspecified,
+ ColorSpace::ChromaSiting::kCollocated, ColorSpace::ChromaSiting::kHalf};
+ constexpr uint64_t enum_bitmask = CreateEnumBitmask(kChromaSitings);
+
+ return SetFromUint8(enum_value, enum_bitmask, chroma_siting);
+}
+
+} // namespace
+
+ColorSpace::ColorSpace() = default;
+ColorSpace::ColorSpace(const ColorSpace& other) = default;
+ColorSpace::ColorSpace(ColorSpace&& other) = default;
+ColorSpace& ColorSpace::operator=(const ColorSpace& other) = default;
+
+ColorSpace::ColorSpace(PrimaryID primaries,
+ TransferID transfer,
+ MatrixID matrix,
+ RangeID range)
+ : ColorSpace(primaries,
+ transfer,
+ matrix,
+ range,
+ ChromaSiting::kUnspecified,
+ ChromaSiting::kUnspecified,
+ nullptr) {}
+
+ColorSpace::ColorSpace(PrimaryID primaries,
+ TransferID transfer,
+ MatrixID matrix,
+ RangeID range,
+ ChromaSiting chroma_siting_horz,
+ ChromaSiting chroma_siting_vert,
+ const HdrMetadata* hdr_metadata)
+ : primaries_(primaries),
+ transfer_(transfer),
+ matrix_(matrix),
+ range_(range),
+ chroma_siting_horizontal_(chroma_siting_horz),
+ chroma_siting_vertical_(chroma_siting_vert),
+ hdr_metadata_(hdr_metadata ? absl::make_optional(*hdr_metadata)
+ : absl::nullopt) {}
+
+ColorSpace::PrimaryID ColorSpace::primaries() const {
+ return primaries_;
+}
+
+ColorSpace::TransferID ColorSpace::transfer() const {
+ return transfer_;
+}
+
+ColorSpace::MatrixID ColorSpace::matrix() const {
+ return matrix_;
+}
+
+ColorSpace::RangeID ColorSpace::range() const {
+ return range_;
+}
+
+ColorSpace::ChromaSiting ColorSpace::chroma_siting_horizontal() const {
+ return chroma_siting_horizontal_;
+}
+
+ColorSpace::ChromaSiting ColorSpace::chroma_siting_vertical() const {
+ return chroma_siting_vertical_;
+}
+
+const HdrMetadata* ColorSpace::hdr_metadata() const {
+ return hdr_metadata_ ? &*hdr_metadata_ : nullptr;
+}
+
+bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) {
+ constexpr PrimaryID kPrimaryIds[] = {
+ PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M,
+ PrimaryID::kBT470BG, PrimaryID::kSMPTE170M, PrimaryID::kSMPTE240M,
+ PrimaryID::kFILM, PrimaryID::kBT2020, PrimaryID::kSMPTEST428,
+ PrimaryID::kSMPTEST431, PrimaryID::kSMPTEST432, PrimaryID::kJEDECP22};
+ constexpr uint64_t enum_bitmask = CreateEnumBitmask(kPrimaryIds);
+
+ return SetFromUint8(enum_value, enum_bitmask, &primaries_);
+}
+
+bool ColorSpace::set_transfer_from_uint8(uint8_t enum_value) {
+ constexpr TransferID kTransferIds[] = {
+ TransferID::kBT709, TransferID::kUnspecified,
+ TransferID::kGAMMA22, TransferID::kGAMMA28,
+ TransferID::kSMPTE170M, TransferID::kSMPTE240M,
+ TransferID::kLINEAR, TransferID::kLOG,
+ TransferID::kLOG_SQRT, TransferID::kIEC61966_2_4,
+ TransferID::kBT1361_ECG, TransferID::kIEC61966_2_1,
+ TransferID::kBT2020_10, TransferID::kBT2020_12,
+ TransferID::kSMPTEST2084, TransferID::kSMPTEST428,
+ TransferID::kARIB_STD_B67};
+ constexpr uint64_t enum_bitmask = CreateEnumBitmask(kTransferIds);
+
+ return SetFromUint8(enum_value, enum_bitmask, &transfer_);
+}
+
+bool ColorSpace::set_matrix_from_uint8(uint8_t enum_value) {
+ constexpr MatrixID kMatrixIds[] = {
+ MatrixID::kRGB, MatrixID::kBT709, MatrixID::kUnspecified,
+ MatrixID::kFCC, MatrixID::kBT470BG, MatrixID::kSMPTE170M,
+ MatrixID::kSMPTE240M, MatrixID::kYCOCG, MatrixID::kBT2020_NCL,
+ MatrixID::kBT2020_CL, MatrixID::kSMPTE2085, MatrixID::kCDNCLS,
+ MatrixID::kCDCLS, MatrixID::kBT2100_ICTCP};
+ constexpr uint64_t enum_bitmask = CreateEnumBitmask(kMatrixIds);
+
+ return SetFromUint8(enum_value, enum_bitmask, &matrix_);
+}
+
+bool ColorSpace::set_range_from_uint8(uint8_t enum_value) {
+ constexpr RangeID kRangeIds[] = {RangeID::kInvalid, RangeID::kLimited,
+ RangeID::kFull, RangeID::kDerived};
+ constexpr uint64_t enum_bitmask = CreateEnumBitmask(kRangeIds);
+
+ return SetFromUint8(enum_value, enum_bitmask, &range_);
+}
+
+bool ColorSpace::set_chroma_siting_horizontal_from_uint8(uint8_t enum_value) {
+ return SetChromaSitingFromUint8(enum_value, &chroma_siting_horizontal_);
+}
+
+bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) {
+ return SetChromaSitingFromUint8(enum_value, &chroma_siting_vertical_);
+}
+
+void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) {
+ hdr_metadata_ =
+ hdr_metadata ? absl::make_optional(*hdr_metadata) : absl::nullopt;
+}
+
+} // namespace webrtc
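
Worked example (a sketch, not part of the patch): CreateEnumBitmask() sets one bit per legal enum value, so SetFromUint8() accepts exactly the wire values listed in the corresponding array and rejects everything else before the static_cast. For RangeID the mask covers values 0-3:

    #include "api/video/color_space.h"

    // Sketch of how the uint8 setters defined above behave.
    void RangeFromWireValue() {
      webrtc::ColorSpace cs;
      bool accepted = cs.set_range_from_uint8(2);  // 2 == RangeID::kFull, bit set.
      bool rejected = cs.set_range_from_uint8(9);  // No RangeID has value 9, so the
                                                   // call returns false and range()
                                                   // is left unchanged.
      (void)accepted;
      (void)rejected;
    }
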
diff --git a/third_party/libwebrtc/api/video/color_space.h b/third_party/libwebrtc/api/video/color_space.h
new file mode 100644
index 0000000000..a7ad86b016
--- /dev/null
+++ b/third_party/libwebrtc/api/video/color_space.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_COLOR_SPACE_H_
+#define API_VIDEO_COLOR_SPACE_H_
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/video/hdr_metadata.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// This class represents color information as specified in T-REC H.273,
+// available from https://www.itu.int/rec/T-REC-H.273.
+//
+// WebRTC's supported codecs:
+// - VP9 supports color profiles, see VP9 Bitstream & Decoding Process
+// Specification Version 0.6 Section 7.2.2 "Color config semantics" available
+// from https://www.webmproject.org.
+// - VP8 only supports BT.601, see
+// https://tools.ietf.org/html/rfc6386#section-9.2
+// - H264 uses the exact same representation as T-REC H.273. See T-REC-H.264
+// E.2.1, "VUI parameters semantics", available from
+// https://www.itu.int/rec/T-REC-H.264.
+
+class RTC_EXPORT ColorSpace {
+ public:
+ enum class PrimaryID : uint8_t {
+ // The indices are equal to the values specified in T-REC H.273 Table 2.
+ kBT709 = 1,
+ kUnspecified = 2,
+ kBT470M = 4,
+ kBT470BG = 5,
+ kSMPTE170M = 6, // Identical to BT601
+ kSMPTE240M = 7,
+ kFILM = 8,
+ kBT2020 = 9,
+ kSMPTEST428 = 10,
+ kSMPTEST431 = 11,
+ kSMPTEST432 = 12,
+ kJEDECP22 = 22, // Identical to EBU3213-E
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kPrimaryIds.
+ };
+
+ enum class TransferID : uint8_t {
+ // The indices are equal to the values specified in T-REC H.273 Table 3.
+ kBT709 = 1,
+ kUnspecified = 2,
+ kGAMMA22 = 4,
+ kGAMMA28 = 5,
+ kSMPTE170M = 6,
+ kSMPTE240M = 7,
+ kLINEAR = 8,
+ kLOG = 9,
+ kLOG_SQRT = 10,
+ kIEC61966_2_4 = 11,
+ kBT1361_ECG = 12,
+ kIEC61966_2_1 = 13,
+ kBT2020_10 = 14,
+ kBT2020_12 = 15,
+ kSMPTEST2084 = 16,
+ kSMPTEST428 = 17,
+ kARIB_STD_B67 = 18,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kTransferIds.
+ };
+
+ enum class MatrixID : uint8_t {
+ // The indices are equal to the values specified in T-REC H.273 Table 4.
+ kRGB = 0,
+ kBT709 = 1,
+ kUnspecified = 2,
+ kFCC = 4,
+ kBT470BG = 5,
+ kSMPTE170M = 6,
+ kSMPTE240M = 7,
+ kYCOCG = 8,
+ kBT2020_NCL = 9,
+ kBT2020_CL = 10,
+ kSMPTE2085 = 11,
+ kCDNCLS = 12,
+ kCDCLS = 13,
+ kBT2100_ICTCP = 14,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kMatrixIds.
+ };
+
+ enum class RangeID {
+ // The indices are equal to the values specified at
+ // https://www.webmproject.org/docs/container/#colour for the element Range.
+ kInvalid = 0,
+ // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
+ kLimited = 1,
+    // Full RGB color range with RGB values from 0 to 255.
+ kFull = 2,
+ // Range is defined by MatrixCoefficients/TransferCharacteristics.
+ kDerived = 3,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kRangeIds.
+ };
+
+ enum class ChromaSiting {
+ // Chroma siting specifies how chroma is subsampled relative to the luma
+ // samples in a YUV video frame.
+ // The indices are equal to the values specified at
+ // https://www.webmproject.org/docs/container/#colour for the element
+ // ChromaSitingVert and ChromaSitingHorz.
+ kUnspecified = 0,
+ kCollocated = 1,
+ kHalf = 2,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kChromaSitings.
+ };
+
+ ColorSpace();
+ ColorSpace(const ColorSpace& other);
+ ColorSpace(ColorSpace&& other);
+ ColorSpace& operator=(const ColorSpace& other);
+ ColorSpace(PrimaryID primaries,
+ TransferID transfer,
+ MatrixID matrix,
+ RangeID range);
+ ColorSpace(PrimaryID primaries,
+ TransferID transfer,
+ MatrixID matrix,
+ RangeID range,
+ ChromaSiting chroma_siting_horizontal,
+ ChromaSiting chroma_siting_vertical,
+ const HdrMetadata* hdr_metadata);
+ friend bool operator==(const ColorSpace& lhs, const ColorSpace& rhs) {
+ return lhs.primaries_ == rhs.primaries_ && lhs.transfer_ == rhs.transfer_ &&
+ lhs.matrix_ == rhs.matrix_ && lhs.range_ == rhs.range_ &&
+ lhs.chroma_siting_horizontal_ == rhs.chroma_siting_horizontal_ &&
+ lhs.chroma_siting_vertical_ == rhs.chroma_siting_vertical_ &&
+ lhs.hdr_metadata_ == rhs.hdr_metadata_;
+ }
+ friend bool operator!=(const ColorSpace& lhs, const ColorSpace& rhs) {
+ return !(lhs == rhs);
+ }
+
+ PrimaryID primaries() const;
+ TransferID transfer() const;
+ MatrixID matrix() const;
+ RangeID range() const;
+ ChromaSiting chroma_siting_horizontal() const;
+ ChromaSiting chroma_siting_vertical() const;
+ const HdrMetadata* hdr_metadata() const;
+
+ bool set_primaries_from_uint8(uint8_t enum_value);
+ bool set_transfer_from_uint8(uint8_t enum_value);
+ bool set_matrix_from_uint8(uint8_t enum_value);
+ bool set_range_from_uint8(uint8_t enum_value);
+ bool set_chroma_siting_horizontal_from_uint8(uint8_t enum_value);
+ bool set_chroma_siting_vertical_from_uint8(uint8_t enum_value);
+ void set_hdr_metadata(const HdrMetadata* hdr_metadata);
+
+ private:
+ PrimaryID primaries_ = PrimaryID::kUnspecified;
+ TransferID transfer_ = TransferID::kUnspecified;
+ MatrixID matrix_ = MatrixID::kUnspecified;
+ RangeID range_ = RangeID::kInvalid;
+ ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified;
+ ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified;
+ absl::optional<HdrMetadata> hdr_metadata_;
+};
+
+} // namespace webrtc
+#endif // API_VIDEO_COLOR_SPACE_H_
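
For reference, a construction sketch against the header above (illustrative only): the four-argument constructor covers the common SDR case and leaves chroma siting unspecified and HDR metadata unset.

    #include "api/video/color_space.h"

    // Full-range BT.709 color space, a typical SDR configuration.
    webrtc::ColorSpace MakeBt709FullRange() {
      return webrtc::ColorSpace(webrtc::ColorSpace::PrimaryID::kBT709,
                                webrtc::ColorSpace::TransferID::kBT709,
                                webrtc::ColorSpace::MatrixID::kBT709,
                                webrtc::ColorSpace::RangeID::kFull);
    }
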
diff --git a/third_party/libwebrtc/api/video/encoded_frame.cc b/third_party/libwebrtc/api/video/encoded_frame.cc
new file mode 100644
index 0000000000..c5e2abbbb4
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_frame.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/encoded_frame.h"
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+absl::optional<Timestamp> EncodedFrame::ReceivedTimestamp() const {
+ return ReceivedTime() >= 0
+ ? absl::make_optional(Timestamp::Millis(ReceivedTime()))
+ : absl::nullopt;
+}
+
+absl::optional<Timestamp> EncodedFrame::RenderTimestamp() const {
+ return RenderTimeMs() >= 0
+ ? absl::make_optional(Timestamp::Millis(RenderTimeMs()))
+ : absl::nullopt;
+}
+
+bool EncodedFrame::delayed_by_retransmission() const {
+ return false;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/encoded_frame.h b/third_party/libwebrtc/api/video/encoded_frame.h
new file mode 100644
index 0000000000..66aee227bb
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_frame.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_ENCODED_FRAME_H_
+#define API_VIDEO_ENCODED_FRAME_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/units/timestamp.h"
+#include "modules/video_coding/encoded_frame.h"
+
+namespace webrtc {
+
+// TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance.
+// TODO(philipel): Move transport specific info out of EncodedFrame.
+// NOTE: This class is still under development and may change without notice.
+class EncodedFrame : public webrtc::VCMEncodedFrame {
+ public:
+ static const uint8_t kMaxFrameReferences = 5;
+
+ EncodedFrame() = default;
+ EncodedFrame(const EncodedFrame&) = default;
+ virtual ~EncodedFrame() {}
+
+ // When this frame was received.
+ // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
+ virtual int64_t ReceivedTime() const = 0;
+ // Returns a Timestamp from `ReceivedTime`, or nullopt if there is no receive
+ // time.
+ absl::optional<webrtc::Timestamp> ReceivedTimestamp() const;
+
+ // When this frame should be rendered.
+ // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
+ virtual int64_t RenderTime() const = 0;
+ // Returns a Timestamp from `RenderTime`, or nullopt if there is no
+ // render time.
+ absl::optional<webrtc::Timestamp> RenderTimestamp() const;
+
+ // This information is currently needed by the timing calculation class.
+ // TODO(philipel): Remove this function when a new timing class has
+ // been implemented.
+ virtual bool delayed_by_retransmission() const;
+
+ bool is_keyframe() const { return num_references == 0; }
+
+ void SetId(int64_t id) { id_ = id; }
+ int64_t Id() const { return id_; }
+
+ // TODO(philipel): Add simple modify/access functions to prevent adding too
+ // many `references`.
+ size_t num_references = 0;
+ int64_t references[kMaxFrameReferences];
+  // Is this subframe the last one in the superframe (in an RTP stream that
+  // would mean that the last packet has the marker bit set).
+ bool is_last_spatial_layer = true;
+
+ private:
+ // The ID of the frame is determined from RTP level information. The IDs are
+ // used to describe order and dependencies between frames.
+ int64_t id_ = -1;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_ENCODED_FRAME_H_
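
Since ReceivedTime() and RenderTime() are pure virtual, concrete frame types provide them. A minimal hypothetical subclass (a sketch only, assuming the inherited VCMEncodedFrame introduces no further pure-virtual methods) might look like this:

    #include <cstdint>

    #include "api/video/encoded_frame.h"

    // Hypothetical frame that records wall-clock receive/render times in ms.
    class TimedFrame : public webrtc::EncodedFrame {
     public:
      TimedFrame(int64_t received_ms, int64_t render_ms)
          : received_ms_(received_ms), render_ms_(render_ms) {}

      int64_t ReceivedTime() const override { return received_ms_; }
      int64_t RenderTime() const override { return render_ms_; }

     private:
      int64_t received_ms_;
      int64_t render_ms_;
    };
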
diff --git a/third_party/libwebrtc/api/video/encoded_frame_gn/moz.build b/third_party/libwebrtc/api/video/encoded_frame_gn/moz.build
new file mode 100644
index 0000000000..f5aee8c8e8
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_frame_gn/moz.build
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/encoded_frame.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("encoded_frame_gn")
diff --git a/third_party/libwebrtc/api/video/encoded_image.cc b/third_party/libwebrtc/api/video/encoded_image.cc
new file mode 100644
index 0000000000..ff61994dee
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_image.cc
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/encoded_image.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+namespace webrtc {
+
+EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) {
+ buffer_ = static_cast<uint8_t*>(malloc(size));
+}
+
+EncodedImageBuffer::EncodedImageBuffer(const uint8_t* data, size_t size)
+ : EncodedImageBuffer(size) {
+ memcpy(buffer_, data, size);
+}
+
+EncodedImageBuffer::~EncodedImageBuffer() {
+ free(buffer_);
+}
+
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
+ return rtc::make_ref_counted<EncodedImageBuffer>(size);
+}
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
+ const uint8_t* data,
+ size_t size) {
+ return rtc::make_ref_counted<EncodedImageBuffer>(data, size);
+}
+
+const uint8_t* EncodedImageBuffer::data() const {
+ return buffer_;
+}
+uint8_t* EncodedImageBuffer::data() {
+ return buffer_;
+}
+size_t EncodedImageBuffer::size() const {
+ return size_;
+}
+
+void EncodedImageBuffer::Realloc(size_t size) {
+  // Calling realloc with size == 0 is equivalent to free, and returns nullptr,
+  // which is confusing on systems where malloc(0) doesn't return a nullptr.
+  // More specifically, it breaks the expectations of
+  // VCMSessionInfo::UpdateDataPointers.
+ RTC_DCHECK(size > 0);
+ buffer_ = static_cast<uint8_t*>(realloc(buffer_, size));
+ size_ = size;
+}
+
+EncodedImage::EncodedImage() = default;
+
+EncodedImage::EncodedImage(EncodedImage&&) = default;
+EncodedImage::EncodedImage(const EncodedImage&) = default;
+
+EncodedImage::~EncodedImage() = default;
+
+EncodedImage& EncodedImage::operator=(EncodedImage&&) = default;
+EncodedImage& EncodedImage::operator=(const EncodedImage&) = default;
+
+void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
+ int64_t encode_finish_ms) {
+ timing_.encode_start_ms = encode_start_ms;
+ timing_.encode_finish_ms = encode_finish_ms;
+}
+
+absl::optional<size_t> EncodedImage::SpatialLayerFrameSize(
+ int spatial_index) const {
+ RTC_DCHECK_GE(spatial_index, 0);
+ RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
+
+ auto it = spatial_layer_frame_size_bytes_.find(spatial_index);
+ if (it == spatial_layer_frame_size_bytes_.end()) {
+ return absl::nullopt;
+ }
+
+ return it->second;
+}
+
+void EncodedImage::SetSpatialLayerFrameSize(int spatial_index,
+ size_t size_bytes) {
+ RTC_DCHECK_GE(spatial_index, 0);
+ RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
+ RTC_DCHECK_GE(size_bytes, 0);
+ spatial_layer_frame_size_bytes_[spatial_index] = size_bytes;
+}
+
+} // namespace webrtc
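
For orientation, here is a minimal usage sketch (not part of the patch) for the buffer type defined above. It relies only on the Create() and Realloc() calls from this file; the payload bytes and the grow amount are placeholders.

    #include "api/video/encoded_image.h"

    void EncodedImageBufferSketch() {
      const uint8_t payload[] = {0x01, 0x02, 0x03};  // placeholder bytes
      rtc::scoped_refptr<webrtc::EncodedImageBuffer> buffer =
          webrtc::EncodedImageBuffer::Create(payload, sizeof(payload));
      // Realloc() preserves the existing bytes while growing the allocation.
      // A size of 0 is rejected by the RTC_DCHECK above, so "shrink to empty"
      // has to be expressed by creating a new buffer instead.
      buffer->Realloc(buffer->size() + 16);
    }
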
diff --git a/third_party/libwebrtc/api/video/encoded_image.h b/third_party/libwebrtc/api/video/encoded_image.h
new file mode 100644
index 0000000000..dae790c46c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_image.h
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_ENCODED_IMAGE_H_
+#define API_VIDEO_ENCODED_IMAGE_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/rtp_packet_infos.h"
+#include "api/scoped_refptr.h"
+#include "api/video/color_space.h"
+#include "api/video/video_codec_constants.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_frame_type.h"
+#include "api/video/video_rotation.h"
+#include "api/video/video_timing.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Abstract interface for buffer storage. Intended to support buffers owned by
+// external encoders with special release requirements, e.g., Java encoders
+// with releaseOutputBuffer.
+class EncodedImageBufferInterface : public rtc::RefCountInterface {
+ public:
+ virtual const uint8_t* data() const = 0;
+ // TODO(bugs.webrtc.org/9378): Make interface essentially read-only, delete
+ // this non-const data method.
+ virtual uint8_t* data() = 0;
+ virtual size_t size() const = 0;
+};
+
+// Basic implementation of EncodedImageBufferInterface.
+class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface {
+ public:
+ static rtc::scoped_refptr<EncodedImageBuffer> Create() { return Create(0); }
+ static rtc::scoped_refptr<EncodedImageBuffer> Create(size_t size);
+ static rtc::scoped_refptr<EncodedImageBuffer> Create(const uint8_t* data,
+ size_t size);
+
+ const uint8_t* data() const override;
+ uint8_t* data() override;
+ size_t size() const override;
+ void Realloc(size_t t);
+
+ protected:
+ explicit EncodedImageBuffer(size_t size);
+ EncodedImageBuffer(const uint8_t* data, size_t size);
+ ~EncodedImageBuffer();
+
+ size_t size_;
+ uint8_t* buffer_;
+};
+
+// TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being
+// cleaned up. Direct use of its members is strongly discouraged.
+class RTC_EXPORT EncodedImage {
+ public:
+ EncodedImage();
+ EncodedImage(EncodedImage&&);
+ EncodedImage(const EncodedImage&);
+
+ ~EncodedImage();
+
+ EncodedImage& operator=(EncodedImage&&);
+ EncodedImage& operator=(const EncodedImage&);
+
+ // TODO(bugs.webrtc.org/9378): Change style to timestamp(), set_timestamp(),
+ // for consistency with the VideoFrame class. Set frame timestamp (90kHz).
+ void SetTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
+
+ // Get frame timestamp (90kHz).
+ uint32_t Timestamp() const { return timestamp_rtp_; }
+
+ void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
+
+ int64_t NtpTimeMs() const { return ntp_time_ms_; }
+
+ absl::optional<int> SpatialIndex() const { return spatial_index_; }
+ void SetSpatialIndex(absl::optional<int> spatial_index) {
+ RTC_DCHECK_GE(spatial_index.value_or(0), 0);
+ RTC_DCHECK_LT(spatial_index.value_or(0), kMaxSpatialLayers);
+ spatial_index_ = spatial_index;
+ }
+
+ absl::optional<int> TemporalIndex() const { return temporal_index_; }
+ void SetTemporalIndex(absl::optional<int> temporal_index) {
+ RTC_DCHECK_GE(temporal_index_.value_or(0), 0);
+ RTC_DCHECK_LT(temporal_index_.value_or(0), kMaxTemporalStreams);
+ temporal_index_ = temporal_index;
+ }
+
+ // These methods can be used to set/get size of subframe with spatial index
+ // `spatial_index` on encoded frames that consist of multiple spatial layers.
+ absl::optional<size_t> SpatialLayerFrameSize(int spatial_index) const;
+ void SetSpatialLayerFrameSize(int spatial_index, size_t size_bytes);
+
+ const webrtc::ColorSpace* ColorSpace() const {
+ return color_space_ ? &*color_space_ : nullptr;
+ }
+ void SetColorSpace(const absl::optional<webrtc::ColorSpace>& color_space) {
+ color_space_ = color_space;
+ }
+
+  // These methods, along with the private member video_frame_tracking_id_,
+  // are meant for media quality testing purposes only.
+ absl::optional<uint16_t> VideoFrameTrackingId() const {
+ return video_frame_tracking_id_;
+ }
+ void SetVideoFrameTrackingId(absl::optional<uint16_t> tracking_id) {
+ video_frame_tracking_id_ = tracking_id;
+ }
+
+ const RtpPacketInfos& PacketInfos() const { return packet_infos_; }
+ void SetPacketInfos(RtpPacketInfos packet_infos) {
+ packet_infos_ = std::move(packet_infos);
+ }
+
+ bool RetransmissionAllowed() const { return retransmission_allowed_; }
+ void SetRetransmissionAllowed(bool retransmission_allowed) {
+ retransmission_allowed_ = retransmission_allowed;
+ }
+
+ size_t size() const { return size_; }
+ void set_size(size_t new_size) {
+ // Allow set_size(0) even if we have no buffer.
+ RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity());
+ size_ = new_size;
+ }
+
+ void SetEncodedData(
+ rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data) {
+ encoded_data_ = encoded_data;
+ size_ = encoded_data->size();
+ }
+
+ void ClearEncodedData() {
+ encoded_data_ = nullptr;
+ size_ = 0;
+ }
+
+ rtc::scoped_refptr<EncodedImageBufferInterface> GetEncodedData() const {
+ return encoded_data_;
+ }
+
+ const uint8_t* data() const {
+ return encoded_data_ ? encoded_data_->data() : nullptr;
+ }
+
+ // Returns whether the encoded image can be considered to be of target
+ // quality.
+ bool IsAtTargetQuality() const { return at_target_quality_; }
+
+  // Sets whether the encoded image can be considered to be of target quality.
+ void SetAtTargetQuality(bool at_target_quality) {
+ at_target_quality_ = at_target_quality;
+ }
+
+ uint32_t _encodedWidth = 0;
+ uint32_t _encodedHeight = 0;
+ // NTP time of the capture time in local timebase in milliseconds.
+ // TODO(minyue): make this member private.
+ int64_t ntp_time_ms_ = 0;
+ int64_t capture_time_ms_ = 0;
+ VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta;
+ VideoRotation rotation_ = kVideoRotation_0;
+ VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
+ int qp_ = -1; // Quantizer value.
+
+ // When an application indicates non-zero values here, it is taken as an
+ // indication that all future frames will be constrained with those limits
+ // until the application indicates a change again.
+ VideoPlayoutDelay playout_delay_;
+
+ struct Timing {
+ uint8_t flags = VideoSendTiming::kInvalid;
+ int64_t encode_start_ms = 0;
+ int64_t encode_finish_ms = 0;
+ int64_t packetization_finish_ms = 0;
+ int64_t pacer_exit_ms = 0;
+ int64_t network_timestamp_ms = 0;
+ int64_t network2_timestamp_ms = 0;
+ int64_t receive_start_ms = 0;
+ int64_t receive_finish_ms = 0;
+ } timing_;
+
+ private:
+ size_t capacity() const { return encoded_data_ ? encoded_data_->size() : 0; }
+
+ rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
+ size_t size_ = 0; // Size of encoded frame data.
+ uint32_t timestamp_rtp_ = 0;
+ absl::optional<int> spatial_index_;
+ absl::optional<int> temporal_index_;
+ std::map<int, size_t> spatial_layer_frame_size_bytes_;
+ absl::optional<webrtc::ColorSpace> color_space_;
+  // This field is meant for media quality testing purposes only. When enabled,
+  // it carries the webrtc::VideoFrame id field from the sender to the receiver.
+ absl::optional<uint16_t> video_frame_tracking_id_;
+ // Information about packets used to assemble this video frame. This is needed
+ // by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's
+ // MediaStreamTrack, in order to implement getContributingSources(). See:
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
+ RtpPacketInfos packet_infos_;
+ bool retransmission_allowed_ = true;
+ // True if the encoded image can be considered to be of target quality.
+ bool at_target_quality_ = false;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_ENCODED_IMAGE_H_
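
To show how the accessors above fit together, the sketch below (not part of the patch) fills in an EncodedImage for a single-layer keyframe. The dimensions, RTP timestamp and payload are arbitrary placeholder values rather than anything mandated by the API.

    #include "api/video/encoded_image.h"

    webrtc::EncodedImage BuildEncodedImageSketch(const uint8_t* payload,
                                                 size_t payload_size) {
      webrtc::EncodedImage image;
      // The image never owns raw bytes directly; ownership lives in an
      // EncodedImageBufferInterface attached via SetEncodedData().
      image.SetEncodedData(
          webrtc::EncodedImageBuffer::Create(payload, payload_size));
      image.SetTimestamp(90000);  // RTP timestamp on the 90 kHz clock.
      image._frameType = webrtc::VideoFrameType::kVideoFrameKey;
      image._encodedWidth = 640;   // placeholder resolution
      image._encodedHeight = 480;
      image.SetSpatialIndex(0);
      image.SetSpatialLayerFrameSize(0, payload_size);
      return image;
    }
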
diff --git a/third_party/libwebrtc/api/video/encoded_image_gn/moz.build b/third_party/libwebrtc/api/video/encoded_image_gn/moz.build
new file mode 100644
index 0000000000..aebcf21b37
--- /dev/null
+++ b/third_party/libwebrtc/api/video/encoded_image_gn/moz.build
@@ -0,0 +1,225 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/encoded_image.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("encoded_image_gn")
diff --git a/third_party/libwebrtc/api/video/frame_buffer.cc b/third_party/libwebrtc/api/video/frame_buffer.cc
new file mode 100644
index 0000000000..f5d93f5f76
--- /dev/null
+++ b/third_party/libwebrtc/api/video/frame_buffer.cc
@@ -0,0 +1,332 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/frame_buffer.h"
+
+#include <algorithm>
+
+#include "absl/algorithm/container.h"
+#include "absl/container/inlined_vector.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/sequence_number_util.h"
+#include "rtc_base/trace_event.h"
+
+namespace webrtc {
+namespace {
+bool ValidReferences(const EncodedFrame& frame) {
+ // All references must point backwards, and duplicates are not allowed.
+ for (size_t i = 0; i < frame.num_references; ++i) {
+ if (frame.references[i] >= frame.Id())
+ return false;
+
+ for (size_t j = i + 1; j < frame.num_references; ++j) {
+ if (frame.references[i] == frame.references[j])
+ return false;
+ }
+ }
+
+ return true;
+}
+
+// Since FrameBuffer::FrameInfo is private it can't be used in the function
+// signature, hence the FrameIteratorT type.
+template <typename FrameIteratorT>
+rtc::ArrayView<const int64_t> GetReferences(const FrameIteratorT& it) {
+ return {it->second.encoded_frame->references,
+ std::min<size_t>(it->second.encoded_frame->num_references,
+ EncodedFrame::kMaxFrameReferences)};
+}
+
+template <typename FrameIteratorT>
+int64_t GetFrameId(const FrameIteratorT& it) {
+ return it->first;
+}
+
+template <typename FrameIteratorT>
+uint32_t GetTimestamp(const FrameIteratorT& it) {
+ return it->second.encoded_frame->Timestamp();
+}
+
+template <typename FrameIteratorT>
+bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) {
+ return it->second.encoded_frame->is_last_spatial_layer;
+}
+} // namespace
+
+FrameBuffer::FrameBuffer(int max_size,
+ int max_decode_history,
+ const FieldTrialsView& field_trials)
+ : legacy_frame_id_jump_behavior_(
+ !field_trials.IsDisabled("WebRTC-LegacyFrameIdJumpBehavior")),
+ max_size_(max_size),
+ decoded_frame_history_(max_decode_history) {}
+
+bool FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
+ const uint32_t ssrc =
+ frame->PacketInfos().empty() ? 0 : frame->PacketInfos()[0].ssrc();
+ if (!ValidReferences(*frame)) {
+ TRACE_EVENT2("webrtc",
+ "FrameBuffer::InsertFrame Frame dropped (Invalid references)",
+ "remote_ssrc", ssrc, "frame_id", frame->Id());
+ RTC_DLOG(LS_WARNING) << "Frame " << frame->Id()
+ << " has invalid references, dropping frame.";
+ return false;
+ }
+
+ if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) {
+ if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() &&
+ AheadOf(frame->Timestamp(),
+ *decoded_frame_history_.GetLastDecodedFrameTimestamp())) {
+ TRACE_EVENT2("webrtc",
+ "FrameBuffer::InsertFrame Frames dropped (OOO + PicId jump)",
+ "remote_ssrc", ssrc, "frame_id", frame->Id());
+ RTC_DLOG(LS_WARNING)
+ << "Keyframe " << frame->Id()
+ << " has newer timestamp but older picture id, clearing buffer.";
+ Clear();
+ } else {
+ // Already decoded past this frame.
+ TRACE_EVENT2("webrtc",
+ "FrameBuffer::InsertFrame Frame dropped (Out of order)",
+ "remote_ssrc", ssrc, "frame_id", frame->Id());
+ return false;
+ }
+ }
+
+ if (frames_.size() == max_size_) {
+ if (frame->is_keyframe()) {
+ TRACE_EVENT2("webrtc",
+ "FrameBuffer::InsertFrame Frames dropped (KF + Full buffer)",
+ "remote_ssrc", ssrc, "frame_id", frame->Id());
+ RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id()
+ << " inserted into full buffer, clearing buffer.";
+ Clear();
+ } else {
+ // No space for this frame.
+ TRACE_EVENT2("webrtc",
+ "FrameBuffer::InsertFrame Frame dropped (Full buffer)",
+ "remote_ssrc", ssrc, "frame_id", frame->Id());
+ return false;
+ }
+ }
+
+ const int64_t frame_id = frame->Id();
+ auto insert_res = frames_.emplace(frame_id, FrameInfo{std::move(frame)});
+ if (!insert_res.second) {
+ // Frame has already been inserted.
+ return false;
+ }
+
+ if (frames_.size() == max_size_) {
+ RTC_DLOG(LS_WARNING) << "Frame " << frame_id
+ << " inserted, buffer is now full.";
+ }
+
+ PropagateContinuity(insert_res.first);
+ FindNextAndLastDecodableTemporalUnit();
+ return true;
+}
+
+absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
+FrameBuffer::ExtractNextDecodableTemporalUnit() {
+ absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4> res;
+ if (!next_decodable_temporal_unit_) {
+ return res;
+ }
+
+ auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
+ for (auto it = next_decodable_temporal_unit_->first_frame; it != end_it;
+ ++it) {
+ decoded_frame_history_.InsertDecoded(GetFrameId(it), GetTimestamp(it));
+ res.push_back(std::move(it->second.encoded_frame));
+ }
+
+ DropNextDecodableTemporalUnit();
+ return res;
+}
+
+void FrameBuffer::DropNextDecodableTemporalUnit() {
+ if (!next_decodable_temporal_unit_) {
+ return;
+ }
+
+ auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
+
+ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), end_it);
+
+ frames_.erase(frames_.begin(), end_it);
+ FindNextAndLastDecodableTemporalUnit();
+}
+
+void FrameBuffer::UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it,
+ FrameIterator end_it) {
+ uint32_t dropped_ssrc = 0;
+ int64_t dropped_frame_id = 0;
+ unsigned int num_discarded_packets = 0;
+ unsigned int num_dropped_frames =
+ std::count_if(begin_it, end_it, [&](const auto& f) {
+ if (f.second.encoded_frame) {
+ const auto& packetInfos = f.second.encoded_frame->PacketInfos();
+ dropped_frame_id = f.first;
+ if (!packetInfos.empty()) {
+ dropped_ssrc = packetInfos[0].ssrc();
+ }
+ num_discarded_packets += packetInfos.size();
+ }
+ return f.second.encoded_frame != nullptr;
+ });
+
+ if (num_dropped_frames > 0) {
+ TRACE_EVENT2("webrtc", "FrameBuffer Dropping Old Frames", "remote_ssrc",
+ dropped_ssrc, "frame_id", dropped_frame_id);
+ }
+ if (num_discarded_packets > 0) {
+ TRACE_EVENT2("webrtc", "FrameBuffer Discarding Old Packets", "remote_ssrc",
+ dropped_ssrc, "frame_id", dropped_frame_id);
+ }
+
+ num_dropped_frames_ += num_dropped_frames;
+ num_discarded_packets_ += num_discarded_packets;
+}
+
+absl::optional<int64_t> FrameBuffer::LastContinuousFrameId() const {
+ return last_continuous_frame_id_;
+}
+
+absl::optional<int64_t> FrameBuffer::LastContinuousTemporalUnitFrameId() const {
+ return last_continuous_temporal_unit_frame_id_;
+}
+
+absl::optional<FrameBuffer::DecodabilityInfo>
+FrameBuffer::DecodableTemporalUnitsInfo() const {
+ return decodable_temporal_units_info_;
+}
+
+int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const {
+ return num_continuous_temporal_units_;
+}
+int FrameBuffer::GetTotalNumberOfDroppedFrames() const {
+ return num_dropped_frames_;
+}
+int FrameBuffer::GetTotalNumberOfDiscardedPackets() const {
+ return num_discarded_packets_;
+}
+
+size_t FrameBuffer::CurrentSize() const {
+ return frames_.size();
+}
+
+bool FrameBuffer::IsContinuous(const FrameIterator& it) const {
+ for (int64_t reference : GetReferences(it)) {
+ if (decoded_frame_history_.WasDecoded(reference)) {
+ continue;
+ }
+
+ auto reference_frame_it = frames_.find(reference);
+ if (reference_frame_it != frames_.end() &&
+ reference_frame_it->second.continuous) {
+ continue;
+ }
+
+ return false;
+ }
+
+ return true;
+}
+
+void FrameBuffer::PropagateContinuity(const FrameIterator& frame_it) {
+ for (auto it = frame_it; it != frames_.end(); ++it) {
+ if (!it->second.continuous) {
+ if (IsContinuous(it)) {
+ it->second.continuous = true;
+ if (last_continuous_frame_id_ < GetFrameId(it)) {
+ last_continuous_frame_id_ = GetFrameId(it);
+ }
+ if (IsLastFrameInTemporalUnit(it)) {
+ num_continuous_temporal_units_++;
+ if (last_continuous_temporal_unit_frame_id_ < GetFrameId(it)) {
+ last_continuous_temporal_unit_frame_id_ = GetFrameId(it);
+ }
+ }
+ }
+ }
+ }
+}
+
+void FrameBuffer::FindNextAndLastDecodableTemporalUnit() {
+ next_decodable_temporal_unit_.reset();
+ decodable_temporal_units_info_.reset();
+
+ if (!last_continuous_temporal_unit_frame_id_) {
+ return;
+ }
+
+ FrameIterator first_frame_it = frames_.begin();
+ FrameIterator last_frame_it = frames_.begin();
+ absl::InlinedVector<int64_t, 4> frames_in_temporal_unit;
+ uint32_t last_decodable_temporal_unit_timestamp;
+ for (auto frame_it = frames_.begin(); frame_it != frames_.end();) {
+ if (GetFrameId(frame_it) > *last_continuous_temporal_unit_frame_id_) {
+ break;
+ }
+
+ if (GetTimestamp(frame_it) != GetTimestamp(first_frame_it)) {
+ frames_in_temporal_unit.clear();
+ first_frame_it = frame_it;
+ }
+
+ frames_in_temporal_unit.push_back(GetFrameId(frame_it));
+
+ last_frame_it = frame_it++;
+
+ if (IsLastFrameInTemporalUnit(last_frame_it)) {
+ bool temporal_unit_decodable = true;
+ for (auto it = first_frame_it; it != frame_it && temporal_unit_decodable;
+ ++it) {
+ for (int64_t reference : GetReferences(it)) {
+ if (!decoded_frame_history_.WasDecoded(reference) &&
+ !absl::c_linear_search(frames_in_temporal_unit, reference)) {
+ // A frame in the temporal unit has a non-decoded reference outside
+ // the temporal unit, so it's not yet ready to be decoded.
+ temporal_unit_decodable = false;
+ break;
+ }
+ }
+ }
+
+ if (temporal_unit_decodable) {
+ if (!next_decodable_temporal_unit_) {
+ next_decodable_temporal_unit_ = {first_frame_it, last_frame_it};
+ }
+
+ last_decodable_temporal_unit_timestamp = GetTimestamp(first_frame_it);
+ }
+ }
+ }
+
+ if (next_decodable_temporal_unit_) {
+ decodable_temporal_units_info_ = {
+ .next_rtp_timestamp =
+ GetTimestamp(next_decodable_temporal_unit_->first_frame),
+ .last_rtp_timestamp = last_decodable_temporal_unit_timestamp};
+ }
+}
+
+void FrameBuffer::Clear() {
+ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), frames_.end());
+ frames_.clear();
+ next_decodable_temporal_unit_.reset();
+ decodable_temporal_units_info_.reset();
+ last_continuous_frame_id_.reset();
+ last_continuous_temporal_unit_frame_id_.reset();
+ decoded_frame_history_.Clear();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/frame_buffer.h b/third_party/libwebrtc/api/video/frame_buffer.h
new file mode 100644
index 0000000000..81fd12da58
--- /dev/null
+++ b/third_party/libwebrtc/api/video/frame_buffer.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_FRAME_BUFFER_H_
+#define API_VIDEO_FRAME_BUFFER_H_
+
+#include <map>
+#include <memory>
+#include <utility>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/video/encoded_frame.h"
+#include "modules/video_coding/utility/decoded_frames_history.h"
+
+namespace webrtc {
+// The high-level idea of the FrameBuffer is to order frames received from the
+// network into a decodable stream. Frames are ordered by frame ID, and grouped
+// into temporal units by timestamp. A temporal unit is decodable after all
+// referenced frames outside the unit have been decoded, and a temporal unit is
+// continuous if all referenced frames are directly or indirectly decodable.
+// The FrameBuffer is thread-unsafe.
+class FrameBuffer {
+ public:
+ struct DecodabilityInfo {
+ uint32_t next_rtp_timestamp;
+ uint32_t last_rtp_timestamp;
+ };
+
+  // `max_size` determines the maximum number of frames the buffer will store,
+  // and `max_decode_history` determines how far back (by frame ID) the buffer
+  // keeps track of whether a frame has been decoded.
+ FrameBuffer(int max_size,
+ int max_decode_history,
+ // TODO(hta): remove field trials!
+ const FieldTrialsView& field_trials);
+ FrameBuffer(const FrameBuffer&) = delete;
+ FrameBuffer& operator=(const FrameBuffer&) = delete;
+ ~FrameBuffer() = default;
+
+  // Inserted frames may only reference backwards, and must have no duplicate
+  // references. Frame insertion will fail if `frame` is a duplicate, has
+  // already been decoded, is invalid, or if the buffer is full and the frame
+  // is not a keyframe. Returns true if the frame was successfully inserted.
+ bool InsertFrame(std::unique_ptr<EncodedFrame> frame);
+
+  // Marks all frames belonging to the next decodable temporal unit as decoded
+  // and returns them.
+ absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
+ ExtractNextDecodableTemporalUnit();
+
+  // Drops all frames in the next decodable temporal unit.
+ void DropNextDecodableTemporalUnit();
+
+ absl::optional<int64_t> LastContinuousFrameId() const;
+ absl::optional<int64_t> LastContinuousTemporalUnitFrameId() const;
+ absl::optional<DecodabilityInfo> DecodableTemporalUnitsInfo() const;
+
+ int GetTotalNumberOfContinuousTemporalUnits() const;
+ int GetTotalNumberOfDroppedFrames() const;
+ int GetTotalNumberOfDiscardedPackets() const;
+ size_t CurrentSize() const;
+
+ private:
+ struct FrameInfo {
+ std::unique_ptr<EncodedFrame> encoded_frame;
+ bool continuous = false;
+ };
+
+ using FrameMap = std::map<int64_t, FrameInfo>;
+ using FrameIterator = FrameMap::iterator;
+
+ struct TemporalUnit {
+ // Both first and last are inclusive.
+ FrameIterator first_frame;
+ FrameIterator last_frame;
+ };
+
+ bool IsContinuous(const FrameIterator& it) const;
+ void PropagateContinuity(const FrameIterator& frame_it);
+ void FindNextAndLastDecodableTemporalUnit();
+ void Clear();
+ void UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it,
+ FrameIterator end_it);
+
+ const bool legacy_frame_id_jump_behavior_;
+ const size_t max_size_;
+ FrameMap frames_;
+ absl::optional<TemporalUnit> next_decodable_temporal_unit_;
+ absl::optional<DecodabilityInfo> decodable_temporal_units_info_;
+ absl::optional<int64_t> last_continuous_frame_id_;
+ absl::optional<int64_t> last_continuous_temporal_unit_frame_id_;
+ video_coding::DecodedFramesHistory decoded_frame_history_;
+
+ int num_continuous_temporal_units_ = 0;
+ int num_dropped_frames_ = 0;
+ int num_discarded_packets_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_FRAME_BUFFER_H_
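
To make the intended call pattern concrete, here is a small receive-side sketch (not part of the patch) built on the API above. PullFrame and DecodeFrame are hypothetical application hooks, and test::ScopedKeyValueConfig, the helper used by the unit tests further down, stands in for whatever FieldTrialsView the application already has.

    #include <memory>
    #include <utility>

    #include "api/video/frame_buffer.h"
    #include "test/scoped_key_value_config.h"

    void FrameBufferLoopSketch(
        std::unique_ptr<webrtc::EncodedFrame> (*PullFrame)(),
        void (*DecodeFrame)(std::unique_ptr<webrtc::EncodedFrame>)) {
      webrtc::test::ScopedKeyValueConfig field_trials;
      webrtc::FrameBuffer buffer(/*max_size=*/10, /*max_decode_history=*/100,
                                 field_trials);

      buffer.InsertFrame(PullFrame());

      // A temporal unit (all spatial layers sharing one RTP timestamp) is only
      // handed out once every reference outside the unit has been decoded.
      while (buffer.DecodableTemporalUnitsInfo()) {
        for (auto& frame : buffer.ExtractNextDecodableTemporalUnit()) {
          DecodeFrame(std::move(frame));
        }
      }
    }
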
diff --git a/third_party/libwebrtc/api/video/frame_buffer_gn/moz.build b/third_party/libwebrtc/api/video/frame_buffer_gn/moz.build
new file mode 100644
index 0000000000..b4c504524c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/frame_buffer_gn/moz.build
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/frame_buffer.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("frame_buffer_gn")
diff --git a/third_party/libwebrtc/api/video/frame_buffer_unittest.cc b/third_party/libwebrtc/api/video/frame_buffer_unittest.cc
new file mode 100644
index 0000000000..92e2f67540
--- /dev/null
+++ b/third_party/libwebrtc/api/video/frame_buffer_unittest.cc
@@ -0,0 +1,393 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/frame_buffer.h"
+
+#include <vector>
+
+#include "api/video/encoded_frame.h"
+#include "test/fake_encoded_frame.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::Matches;
+
+MATCHER_P(FrameWithId, id, "") {
+ return Matches(Eq(id))(arg->Id());
+}
+
+TEST(FrameBuffer3Test, RejectInvalidRefs) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ // Ref must be less than the id of this frame.
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(0).Id(0).Refs({0}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));
+
+ // Duplicate ids are also invalid.
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1, 1}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
+}
+
+TEST(FrameBuffer3Test, LastContinuousUpdatesOnInsertedFrames) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
+
+ EXPECT_TRUE(
+ buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(2));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
+}
+
+TEST(FrameBuffer3Test, LastContinuousFrameReordering) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(3));
+}
+
+TEST(FrameBuffer3Test, LastContinuousTemporalUnit) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(
+ buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
+}
+
+TEST(FrameBuffer3Test, LastContinuousTemporalUnitReordering) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(
+ buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(3).Refs({1}).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(4).Refs({2, 3}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(4));
+}
+
+TEST(FrameBuffer3Test, NextDecodable) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo(), Eq(absl::nullopt));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
+}
+
+TEST(FrameBuffer3Test, AdvanceNextDecodableOnExtraction) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
+
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(1)));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(20U));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(2)));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(30U));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+}
+
+TEST(FrameBuffer3Test, AdvanceLastDecodableOnExtraction) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->last_rtp_timestamp, Eq(10U));
+
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(1)));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->last_rtp_timestamp, Eq(30U));
+}
+
+TEST(FrameBuffer3Test, FrameUpdatesNextDecodable) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).AsLast().Build()));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(20U));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
+}
+
+TEST(FrameBuffer3Test, KeyframeClearsFullBuffer) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/5, /*max_decode_history=*/10,
+ field_trials);
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(4).Refs({3}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(50).Id(5).Refs({4}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));
+
+ // Frame buffer is full
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(60).Id(6).Refs({5}).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(70).Id(7).AsLast().Build()));
+ EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(7));
+}
+
+TEST(FrameBuffer3Test, DropNextDecodableTemporalUnit) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
+
+ buffer.ExtractNextDecodableTemporalUnit();
+ buffer.DropNextDecodableTemporalUnit();
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+}
+
+TEST(FrameBuffer3Test, OldFramesAreIgnored) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+
+ buffer.ExtractNextDecodableTemporalUnit();
+ buffer.ExtractNextDecodableTemporalUnit();
+
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+}
+
+TEST(FrameBuffer3Test, ReturnFullTemporalUnitKSVC) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_TRUE(
+ buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(1), FrameWithId(2), FrameWithId(3)));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(4).Refs({3}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(4)));
+}
+
+TEST(FrameBuffer3Test, InterleavedStream) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(4).Refs({2}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(50).Id(5).Refs({3}).AsLast().Build()));
+
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(1)));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(2)));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(4)));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(5)));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(70).Id(7).Refs({5}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(7)));
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(60).Id(6).Refs({4}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(90).Id(9).Refs({7}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(9)));
+}
+
+TEST(FrameBuffer3Test, LegacyFrameIdJumpBehavior) {
+ {
+ test::ScopedKeyValueConfig field_trials(
+ "WebRTC-LegacyFrameIdJumpBehavior/Disabled/");
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(3).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(2).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
+ }
+
+ {
+    // WebRTC-LegacyFrameIdJumpBehavior is enabled by default, so the keyframe
+    // with a newer timestamp but a lower frame id below clears the buffer.
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(3).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(3)));
+ EXPECT_FALSE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(30).Id(2).Refs({1}).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(1).AsLast().Build()));
+ EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
+ ElementsAre(FrameWithId(1)));
+ }
+}
+
+TEST(FrameBuffer3Test, TotalNumberOfContinuousTemporalUnits) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(0));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).Build()));
+ EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(4).Refs({2}).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(5).Refs({3, 4}).AsLast().Build()));
+ EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
+
+ // Reordered
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(3));
+}
+
+TEST(FrameBuffer3Test, TotalNumberOfDroppedFrames) {
+ test::ScopedKeyValueConfig field_trials;
+ FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
+ field_trials);
+ EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));
+
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(20).Id(3).Refs({2}).AsLast().Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(4).Refs({1}).Build()));
+ EXPECT_TRUE(buffer.InsertFrame(
+ test::FakeFrameBuilder().Time(40).Id(5).Refs({4}).AsLast().Build()));
+
+ buffer.ExtractNextDecodableTemporalUnit();
+ EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));
+
+ buffer.DropNextDecodableTemporalUnit();
+ EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));
+
+ buffer.ExtractNextDecodableTemporalUnit();
+ EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/hdr_metadata.cc b/third_party/libwebrtc/api/video/hdr_metadata.cc
new file mode 100644
index 0000000000..e2a669c98a
--- /dev/null
+++ b/third_party/libwebrtc/api/video/hdr_metadata.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/hdr_metadata.h"
+
+namespace webrtc {
+
+HdrMasteringMetadata::Chromaticity::Chromaticity() = default;
+
+HdrMasteringMetadata::HdrMasteringMetadata() = default;
+
+HdrMetadata::HdrMetadata() = default;
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/hdr_metadata.h b/third_party/libwebrtc/api/video/hdr_metadata.h
new file mode 100644
index 0000000000..e9001a2c80
--- /dev/null
+++ b/third_party/libwebrtc/api/video/hdr_metadata.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_HDR_METADATA_H_
+#define API_VIDEO_HDR_METADATA_H_
+
+namespace webrtc {
+
+// SMPTE ST 2086 mastering metadata,
+// see https://ieeexplore.ieee.org/document/8353899.
+struct HdrMasteringMetadata {
+ struct Chromaticity {
+ Chromaticity();
+
+ bool operator==(const Chromaticity& rhs) const {
+ return x == rhs.x && y == rhs.y;
+ }
+
+ bool Validate() const {
+ return x >= 0.0 && x <= 1.0 && y >= 0.0 && y <= 1.0;
+ }
+
+ // xy chromaticity coordinates must be calculated as specified in ISO
+ // 11664-3:2012 Section 7, and must be specified with four decimal places.
+ // The x coordinate should be in the range [0.0001, 0.7400] and the y
+ // coordinate should be in the range [0.0001, 0.8400]. Valid range [0.0000,
+ // 1.0000].
+ float x = 0.0f;
+ float y = 0.0f;
+ };
+
+ HdrMasteringMetadata();
+
+ bool operator==(const HdrMasteringMetadata& rhs) const {
+ return ((primary_r == rhs.primary_r) && (primary_g == rhs.primary_g) &&
+ (primary_b == rhs.primary_b) && (white_point == rhs.white_point) &&
+ (luminance_max == rhs.luminance_max) &&
+ (luminance_min == rhs.luminance_min));
+ }
+
+ bool Validate() const {
+ return luminance_max >= 0.0 && luminance_max <= 20000.0 &&
+ luminance_min >= 0.0 && luminance_min <= 5.0 &&
+ primary_r.Validate() && primary_g.Validate() &&
+ primary_b.Validate() && white_point.Validate();
+ }
+
+ // The nominal primaries of the mastering display.
+ Chromaticity primary_r;
+ Chromaticity primary_g;
+ Chromaticity primary_b;
+
+ // The nominal chromaticity of the white point of the mastering display.
+ Chromaticity white_point;
+
+ // The nominal maximum display luminance of the mastering display. Specified
+ // in the unit candela/m2. The value should be in the range [5, 10000] with
+ // zero decimal places. Valid range [0, 20000].
+ float luminance_max = 0.0f;
+
+ // The nominal minimum display luminance of the mastering display. Specified
+ // in the unit candela/m2. The value should be in the range [0.0001, 5.0000]
+ // with four decimal places. Valid range [0.0000, 5.0000].
+ float luminance_min = 0.0f;
+};
+
+// High dynamic range (HDR) metadata common for HDR10 and WebM/VP9-based HDR
+// formats. This struct replicates the HDRMetadata struct defined in
+// https://cs.chromium.org/chromium/src/media/base/hdr_metadata.h
+struct HdrMetadata {
+ HdrMetadata();
+
+ bool operator==(const HdrMetadata& rhs) const {
+ return (
+ (max_content_light_level == rhs.max_content_light_level) &&
+ (max_frame_average_light_level == rhs.max_frame_average_light_level) &&
+ (mastering_metadata == rhs.mastering_metadata));
+ }
+
+ bool Validate() const {
+ return max_content_light_level >= 0 && max_content_light_level <= 20000 &&
+ max_frame_average_light_level >= 0 &&
+ max_frame_average_light_level <= 20000 &&
+ mastering_metadata.Validate();
+ }
+
+ HdrMasteringMetadata mastering_metadata;
+ // Max content light level (CLL), i.e. maximum brightness level present in the
+ // stream, in nits. 1 nit = 1 candela/m2. Valid range [0, 20000].
+ int max_content_light_level = 0;
+ // Max frame-average light level (FALL), i.e. maximum average brightness of
+ // the brightest frame in the stream, in nits. Valid range [0, 20000].
+ int max_frame_average_light_level = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_HDR_METADATA_H_
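
For reference, a minimal sketch (not part of this patch) of populating and validating the struct declared above; the BT.2020 primaries, D65 white point and the luminance/light-level numbers are arbitrary example values, not values mandated by the API:

#include "api/video/hdr_metadata.h"

bool MakeExampleHdrMetadata(webrtc::HdrMetadata* hdr) {
  auto& m = hdr->mastering_metadata;
  m.primary_r.x = 0.708f;  m.primary_r.y = 0.292f;  // BT.2020 red
  m.primary_g.x = 0.170f;  m.primary_g.y = 0.797f;  // BT.2020 green
  m.primary_b.x = 0.131f;  m.primary_b.y = 0.046f;  // BT.2020 blue
  m.white_point.x = 0.3127f;
  m.white_point.y = 0.3290f;                        // D65
  m.luminance_max = 1000.0f;                        // cd/m2
  m.luminance_min = 0.0001f;                        // cd/m2
  hdr->max_content_light_level = 1000;              // nits
  hdr->max_frame_average_light_level = 400;         // nits
  return hdr->Validate();  // true: every field is inside its valid range.
}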
diff --git a/third_party/libwebrtc/api/video/i010_buffer.cc b/third_party/libwebrtc/api/video/i010_buffer.cc
new file mode 100644
index 0000000000..d78e854eb9
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i010_buffer.cc
@@ -0,0 +1,213 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i010_buffer.h"
+
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+static const int kBytesPerPixel = 2;
+
+namespace webrtc {
+
+namespace {
+
+int I010DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return kBytesPerPixel *
+ (stride_y * height + (stride_u + stride_v) * ((height + 1) / 2));
+}
+
+} // namespace
+
+I010Buffer::I010Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint16_t*>(
+ AlignedMalloc(I010DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+ RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I010Buffer::~I010Buffer() {}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I010Buffer>(width, height, width,
+ (width + 1) / 2, (width + 1) / 2);
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+ const I010BufferInterface& source) {
+ const int width = source.width();
+ const int height = source.height();
+ rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+ int res = libyuv::I010Copy(
+ source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+ source.DataV(), source.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), width, height);
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+ const I420BufferInterface& source) {
+ const int width = source.width();
+ const int height = source.height();
+ rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+ int res = libyuv::I420ToI010(
+ source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+ source.DataV(), source.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), width, height);
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Rotate(
+ const I010BufferInterface& src,
+ VideoRotation rotation) {
+ if (rotation == webrtc::kVideoRotation_0)
+ return Copy(src);
+
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I010Buffer> buffer =
+ Create(rotated_width, rotated_height);
+
+ int res = libyuv::I010Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
+ src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), src.width(), src.height(),
+ static_cast<libyuv::RotationMode>(rotation));
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I010Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ int res = libyuv::I010ToI420(
+ DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
+ RTC_DCHECK_EQ(res, 0);
+
+ return i420_buffer;
+}
+
+int I010Buffer::width() const {
+ return width_;
+}
+
+int I010Buffer::height() const {
+ return height_;
+}
+
+const uint16_t* I010Buffer::DataY() const {
+ return data_.get();
+}
+const uint16_t* I010Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint16_t* I010Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
+}
+
+int I010Buffer::StrideY() const {
+ return stride_y_;
+}
+int I010Buffer::StrideU() const {
+ return stride_u_;
+}
+int I010Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint16_t* I010Buffer::MutableDataY() {
+ return const_cast<uint16_t*>(DataY());
+}
+uint16_t* I010Buffer::MutableDataU() {
+ return const_cast<uint16_t*>(DataU());
+}
+uint16_t* I010Buffer::MutableDataV() {
+ return const_cast<uint16_t*>(DataV());
+}
+
+void I010Buffer::CropAndScaleFrom(const I010BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+ // Make sure offset is even so that u/v plane becomes aligned.
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y / 2;
+ offset_x = uv_offset_x * 2;
+ offset_y = uv_offset_y * 2;
+
+ const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint16_t* u_plane =
+ src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+ const uint16_t* v_plane =
+ src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+ int res = libyuv::I420Scale_16(
+ y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
+ crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
+ StrideU(), MutableDataV(), StrideV(), width(), height(),
+ libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+void I010Buffer::ScaleFrom(const I010BufferInterface& src) {
+ CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+} // namespace webrtc
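
As a standalone restatement (not part of this patch) of the single-allocation layout used by I010DataSize() and the Data{Y,U,V}() accessors above, assuming the default strides chosen by Create():

#include <cstdio>

int main() {
  const int w = 1281, h = 721;               // deliberately odd dimensions
  const int stride_y = w;
  const int stride_u = (w + 1) / 2;
  const int stride_v = (w + 1) / 2;
  const int chroma_rows = (h + 1) / 2;       // 4:2:0 vertical subsampling
  const int u_offset = stride_y * h;         // DataU() == DataY() + u_offset
  const int v_offset = u_offset + stride_u * chroma_rows;  // DataV()
  const int total = v_offset + stride_v * chroma_rows;     // total samples
  std::printf("u=%d v=%d bytes=%d\n", u_offset, v_offset, total * 2);
  return 0;  // total * 2 matches I010DataSize() (kBytesPerPixel == 2).
}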
diff --git a/third_party/libwebrtc/api/video/i010_buffer.h b/third_party/libwebrtc/api/video/i010_buffer.h
new file mode 100644
index 0000000000..11e0879fec
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i010_buffer.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I010_BUFFER_H_
+#define API_VIDEO_I010_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+
+namespace webrtc {
+
+// Plain I010 buffer in standard memory.
+class I010Buffer : public I010BufferInterface {
+ public:
+ // Create a new buffer.
+ static rtc::scoped_refptr<I010Buffer> Create(int width, int height);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I010Buffer> Copy(const I010BufferInterface& buffer);
+
+ // Convert and put I420 buffer into a new buffer.
+ static rtc::scoped_refptr<I010Buffer> Copy(const I420BufferInterface& buffer);
+
+ // Return a rotated copy of `src`.
+ static rtc::scoped_refptr<I010Buffer> Rotate(const I010BufferInterface& src,
+ VideoRotation rotation);
+
+ // VideoFrameBuffer implementation.
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ // PlanarYuv16BBuffer implementation.
+ int width() const override;
+ int height() const override;
+ const uint16_t* DataY() const override;
+ const uint16_t* DataU() const override;
+ const uint16_t* DataV() const override;
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint16_t* MutableDataY();
+ uint16_t* MutableDataU();
+ uint16_t* MutableDataV();
+
+ // Scale the cropped area of `src` to the size of `this` buffer, and
+ // write the result into `this`.
+ void CropAndScaleFrom(const I010BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ // Scale all of `src` to the size of `this` buffer, with no cropping.
+ void ScaleFrom(const I010BufferInterface& src);
+
+ protected:
+ I010Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+ ~I010Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I010_BUFFER_H_
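
A rough usage sketch (not part of this patch): widen an 8-bit I420 frame to 10 bits, scale it down, and convert back for an 8-bit consumer. The half-size target is an arbitrary example choice.

#include <algorithm>

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"

rtc::scoped_refptr<webrtc::I420BufferInterface> HalveIn10Bit(
    const webrtc::I420BufferInterface& src) {
  rtc::scoped_refptr<webrtc::I010Buffer> ten_bit =
      webrtc::I010Buffer::Copy(src);                    // I420 -> I010
  rtc::scoped_refptr<webrtc::I010Buffer> scaled = webrtc::I010Buffer::Create(
      std::max(1, src.width() / 2), std::max(1, src.height() / 2));
  scaled->ScaleFrom(*ten_bit);                          // box-filtered scaling
  return scaled->ToI420();                              // back to 8 bits
}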
diff --git a/third_party/libwebrtc/api/video/i210_buffer.cc b/third_party/libwebrtc/api/video/i210_buffer.cc
new file mode 100644
index 0000000000..c83c8a0c0b
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i210_buffer.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i210_buffer.h"
+
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/i422_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+static const int kBytesPerPixel = 2;
+
+namespace webrtc {
+
+namespace {
+
+int I210DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return kBytesPerPixel *
+ (stride_y * height + stride_u * height + stride_v * height);
+}
+
+} // namespace
+
+I210Buffer::I210Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint16_t*>(
+ AlignedMalloc(I210DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+ RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I210Buffer::~I210Buffer() {}
+
+// static
+rtc::scoped_refptr<I210Buffer> I210Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I210Buffer>(width, height, width,
+ (width + 1) / 2, (width + 1) / 2);
+}
+
+// static
+rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
+ const I210BufferInterface& source) {
+ const int width = source.width();
+ const int height = source.height();
+ rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
+ RTC_CHECK_EQ(
+ 0, libyuv::I210Copy(
+ source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+ source.DataV(), source.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), width, height));
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
+ const I420BufferInterface& source) {
+ const int width = source.width();
+ const int height = source.height();
+ auto i422buffer = I422Buffer::Copy(source);
+ rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
+ RTC_CHECK_EQ(0, libyuv::I422ToI210(i422buffer->DataY(), i422buffer->StrideY(),
+ i422buffer->DataU(), i422buffer->StrideU(),
+ i422buffer->DataV(), i422buffer->StrideV(),
+ buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(),
+ width, height));
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I210Buffer> I210Buffer::Rotate(
+ const I210BufferInterface& src,
+ VideoRotation rotation) {
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I210Buffer> buffer =
+ I210Buffer::Create(rotated_width, rotated_height);
+
+ RTC_CHECK_EQ(0,
+ libyuv::I210Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
+ src.DataV(), src.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), src.width(),
+ src.height(), static_cast<libyuv::RotationMode>(rotation)));
+
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I210Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+ width(), height());
+ return i420_buffer;
+}
+
+int I210Buffer::width() const {
+ return width_;
+}
+
+int I210Buffer::height() const {
+ return height_;
+}
+
+const uint16_t* I210Buffer::DataY() const {
+ return data_.get();
+}
+const uint16_t* I210Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint16_t* I210Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * height_;
+}
+
+int I210Buffer::StrideY() const {
+ return stride_y_;
+}
+int I210Buffer::StrideU() const {
+ return stride_u_;
+}
+int I210Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint16_t* I210Buffer::MutableDataY() {
+ return const_cast<uint16_t*>(DataY());
+}
+uint16_t* I210Buffer::MutableDataU() {
+ return const_cast<uint16_t*>(DataU());
+}
+uint16_t* I210Buffer::MutableDataV() {
+ return const_cast<uint16_t*>(DataV());
+}
+
+void I210Buffer::CropAndScaleFrom(const I210BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+ RTC_CHECK_GE(crop_width, 0);
+ RTC_CHECK_GE(crop_height, 0);
+
+  // Make sure the horizontal offset is even so that the u/v planes stay
+  // aligned; 4:2:2 chroma is not subsampled vertically, so offset_y is kept.
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y;
+ offset_x = uv_offset_x * 2;
+
+ const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint16_t* u_plane =
+ src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+ const uint16_t* v_plane =
+ src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+ int res = libyuv::I422Scale_16(
+ y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
+ crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
+ StrideU(), MutableDataV(), StrideV(), width(), height(),
+ libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+void I210Buffer::ScaleFrom(const I210BufferInterface& src) {
+ CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+} // namespace webrtc
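
The chroma-offset handling in CropAndScaleFrom() above differs from the 4:2:0 buffers: only the horizontal offset is snapped to an even value. A small sketch (not part of this patch) contrasting the two cases:

#include <cstdio>

int main() {
  const int offset_x = 13, offset_y = 7;
  // 4:2:2 (I210/I422): chroma is subsampled horizontally only.
  const int x_422 = (offset_x / 2) * 2;  // 12: snapped to even
  const int y_422 = offset_y;            // 7: used as-is
  // 4:2:0 (I010/I420): chroma is subsampled in both directions.
  const int x_420 = (offset_x / 2) * 2;  // 12: snapped to even
  const int y_420 = (offset_y / 2) * 2;  // 6: snapped to even
  std::printf("422 origin (%d,%d), 420 origin (%d,%d)\n", x_422, y_422, x_420,
              y_420);
  return 0;
}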
diff --git a/third_party/libwebrtc/api/video/i210_buffer.h b/third_party/libwebrtc/api/video/i210_buffer.h
new file mode 100644
index 0000000000..e3b6452b95
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i210_buffer.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I210_BUFFER_H_
+#define API_VIDEO_I210_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+
+namespace webrtc {
+
+// Plain I210 (yuv 422 planar 10 bits) buffer in standard memory.
+class I210Buffer : public I210BufferInterface {
+ public:
+ // Create a new buffer.
+ static rtc::scoped_refptr<I210Buffer> Create(int width, int height);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I210Buffer> Copy(const I210BufferInterface& buffer);
+
+ // Convert and put I420 buffer into a new buffer.
+ static rtc::scoped_refptr<I210Buffer> Copy(const I420BufferInterface& buffer);
+
+ // Return a rotated copy of `src`.
+ static rtc::scoped_refptr<I210Buffer> Rotate(const I210BufferInterface& src,
+ VideoRotation rotation);
+
+ // VideoFrameBuffer implementation.
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ // PlanarYuv16BBuffer implementation.
+ int width() const override;
+ int height() const override;
+ const uint16_t* DataY() const override;
+ const uint16_t* DataU() const override;
+ const uint16_t* DataV() const override;
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint16_t* MutableDataY();
+ uint16_t* MutableDataU();
+ uint16_t* MutableDataV();
+
+ // Scale the cropped area of `src` to the size of `this` buffer, and
+ // write the result into `this`.
+ void CropAndScaleFrom(const I210BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ // Scale all of `src` to the size of `this` buffer, with no cropping.
+ void ScaleFrom(const I210BufferInterface& src);
+
+ protected:
+ I210Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+ ~I210Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I210_BUFFER_H_
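
A usage sketch (not part of this patch): rotating by 90 or 270 degrees swaps the output dimensions, matching the std::swap() in I210Buffer::Rotate() above.

#include "api/video/i210_buffer.h"

rtc::scoped_refptr<webrtc::I210Buffer> ToPortrait(
    const webrtc::I210BufferInterface& landscape) {
  // For a 1280x720 input, the result is 720x1280.
  return webrtc::I210Buffer::Rotate(landscape, webrtc::kVideoRotation_90);
}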
diff --git a/third_party/libwebrtc/api/video/i410_buffer.cc b/third_party/libwebrtc/api/video/i410_buffer.cc
new file mode 100644
index 0000000000..1b0d4fdb5c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i410_buffer.cc
@@ -0,0 +1,221 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i410_buffer.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+static const int kBytesPerPixel = 2;
+
+namespace webrtc {
+
+namespace {
+
+int I410DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return kBytesPerPixel *
+ (stride_y * height + stride_u * height + stride_v * height);
+}
+
+} // namespace
+
+I410Buffer::I410Buffer(int width, int height)
+ : I410Buffer(width, height, width, width, width) {}
+
+I410Buffer::I410Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint16_t*>(
+ AlignedMalloc(I410DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_u, width);
+ RTC_DCHECK_GE(stride_v, width);
+}
+
+I410Buffer::~I410Buffer() {}
+
+// static
+rtc::scoped_refptr<I410Buffer> I410Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I410Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<I410Buffer> I410Buffer::Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v) {
+ return rtc::make_ref_counted<I410Buffer>(width, height, stride_y, stride_u,
+ stride_v);
+}
+
+// static
+rtc::scoped_refptr<I410Buffer> I410Buffer::Copy(
+ const I410BufferInterface& source) {
+ return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
+ source.DataU(), source.StrideU(), source.DataV(),
+ source.StrideV());
+}
+
+// static
+rtc::scoped_refptr<I410Buffer> I410Buffer::Copy(int width,
+ int height,
+ const uint16_t* data_y,
+ int stride_y,
+ const uint16_t* data_u,
+ int stride_u,
+ const uint16_t* data_v,
+ int stride_v) {
+ // Note: May use different strides than the input data.
+ rtc::scoped_refptr<I410Buffer> buffer = Create(width, height);
+ int res = libyuv::I410Copy(data_y, stride_y, data_u, stride_u, data_v,
+ stride_v, buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(),
+ buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), width, height);
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I410Buffer> I410Buffer::Rotate(
+ const I410BufferInterface& src,
+ VideoRotation rotation) {
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I410Buffer> buffer =
+ I410Buffer::Create(rotated_width, rotated_height);
+
+ int res = libyuv::I410Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
+ src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), src.width(), src.height(),
+ static_cast<libyuv::RotationMode>(rotation));
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I410Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ int res = libyuv::I410ToI420(
+ DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
+ RTC_DCHECK_EQ(res, 0);
+
+ return i420_buffer;
+}
+
+void I410Buffer::InitializeData() {
+ memset(data_.get(), 0,
+ I410DataSize(height_, stride_y_, stride_u_, stride_v_));
+}
+
+int I410Buffer::width() const {
+ return width_;
+}
+
+int I410Buffer::height() const {
+ return height_;
+}
+
+const uint16_t* I410Buffer::DataY() const {
+ return data_.get();
+}
+const uint16_t* I410Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint16_t* I410Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * height_;
+}
+
+int I410Buffer::StrideY() const {
+ return stride_y_;
+}
+int I410Buffer::StrideU() const {
+ return stride_u_;
+}
+int I410Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint16_t* I410Buffer::MutableDataY() {
+ return const_cast<uint16_t*>(DataY());
+}
+uint16_t* I410Buffer::MutableDataU() {
+ return const_cast<uint16_t*>(DataU());
+}
+uint16_t* I410Buffer::MutableDataV() {
+ return const_cast<uint16_t*>(DataV());
+}
+
+void I410Buffer::CropAndScaleFrom(const I410BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+ const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint16_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x;
+ const uint16_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x;
+ int res = libyuv::I444Scale_16(
+ y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
+ crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
+ StrideU(), MutableDataV(), StrideV(), width(), height(),
+ libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+void I410Buffer::ScaleFrom(const I410BufferInterface& src) {
+ CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/i410_buffer.h b/third_party/libwebrtc/api/video/i410_buffer.h
new file mode 100644
index 0000000000..1c0cd86c12
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i410_buffer.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I410_BUFFER_H_
+#define API_VIDEO_I410_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+
+namespace webrtc {
+
+// Plain I410 (yuv 444 planar 10 bits) buffer in standard memory.
+class RTC_EXPORT I410Buffer : public I410BufferInterface {
+ public:
+ static rtc::scoped_refptr<I410Buffer> Create(int width, int height);
+ static rtc::scoped_refptr<I410Buffer> Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I410Buffer> Copy(const I410BufferInterface& buffer);
+
+ static rtc::scoped_refptr<I410Buffer> Copy(int width,
+ int height,
+ const uint16_t* data_y,
+ int stride_y,
+ const uint16_t* data_u,
+ int stride_u,
+ const uint16_t* data_v,
+ int stride_v);
+
+  // Returns a rotated copy of `src`.
+ static rtc::scoped_refptr<I410Buffer> Rotate(const I410BufferInterface& src,
+ VideoRotation rotation);
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+ const I420BufferInterface* GetI420() const final { return nullptr; }
+
+  // Sets all three planes to all zeros. Used to work around quirks in memory
+  // checkers (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+  // ffmpeg (http://crbug.com/390941).
+  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
+  // issues are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ int width() const override;
+ int height() const override;
+ const uint16_t* DataY() const override;
+ const uint16_t* DataU() const override;
+ const uint16_t* DataV() const override;
+
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint16_t* MutableDataY();
+ uint16_t* MutableDataU();
+ uint16_t* MutableDataV();
+
+  // Scale the cropped area of `src` to the size of `this` buffer, and
+  // write the result into `this`.
+ void CropAndScaleFrom(const I410BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ // Scale all of `src` to the size of `this` buffer, with no cropping.
+ void ScaleFrom(const I410BufferInterface& src);
+
+ protected:
+ I410Buffer(int width, int height);
+ I410Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+
+ ~I410Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I410_BUFFER_H_
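
Note that GetI420() above deliberately returns nullptr for this 10-bit buffer, so 8-bit consumers have to request a converted copy. A minimal sketch (not part of this patch):

#include "api/video/i410_buffer.h"

rtc::scoped_refptr<webrtc::I420BufferInterface> To8Bit(
    webrtc::I410Buffer& buffer) {
  // buffer.GetI420() is nullptr here; ToI420() allocates and converts instead.
  return buffer.ToI420();
}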
diff --git a/third_party/libwebrtc/api/video/i420_buffer.cc b/third_party/libwebrtc/api/video/i420_buffer.cc
new file mode 100644
index 0000000000..bf7fc06ee9
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i420_buffer.cc
@@ -0,0 +1,232 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i420_buffer.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+
+namespace webrtc {
+
+namespace {
+
+int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
+}
+
+} // namespace
+
+I420Buffer::I420Buffer(int width, int height)
+ : I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}
+
+I420Buffer::I420Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint8_t*>(
+ AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+ RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I420Buffer::~I420Buffer() {}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I420Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v) {
+ return rtc::make_ref_counted<I420Buffer>(width, height, stride_y, stride_u,
+ stride_v);
+}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
+ const I420BufferInterface& source) {
+ return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
+ source.DataU(), source.StrideU(), source.DataV(),
+ source.StrideV());
+}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v) {
+ // Note: May use different strides than the input data.
+ rtc::scoped_refptr<I420Buffer> buffer = Create(width, height);
+ RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v,
+ stride_v, buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(),
+ buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), width, height));
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Rotate(
+ const I420BufferInterface& src,
+ VideoRotation rotation) {
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ I420Buffer::Create(rotated_width, rotated_height);
+
+ RTC_CHECK_EQ(0,
+ libyuv::I420Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
+ src.DataV(), src.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), src.width(),
+ src.height(), static_cast<libyuv::RotationMode>(rotation)));
+
+ return buffer;
+}
+
+void I420Buffer::InitializeData() {
+ memset(data_.get(), 0,
+ I420DataSize(height_, stride_y_, stride_u_, stride_v_));
+}
+
+int I420Buffer::width() const {
+ return width_;
+}
+
+int I420Buffer::height() const {
+ return height_;
+}
+
+const uint8_t* I420Buffer::DataY() const {
+ return data_.get();
+}
+const uint8_t* I420Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint8_t* I420Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
+}
+
+int I420Buffer::StrideY() const {
+ return stride_y_;
+}
+int I420Buffer::StrideU() const {
+ return stride_u_;
+}
+int I420Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint8_t* I420Buffer::MutableDataY() {
+ return const_cast<uint8_t*>(DataY());
+}
+uint8_t* I420Buffer::MutableDataU() {
+ return const_cast<uint8_t*>(DataU());
+}
+uint8_t* I420Buffer::MutableDataV() {
+ return const_cast<uint8_t*>(DataV());
+}
+
+// static
+void I420Buffer::SetBlack(I420Buffer* buffer) {
+ RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), 0, 0,
+ buffer->width(), buffer->height(), 0, 128,
+ 128) == 0);
+}
+
+void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+ // Make sure offset is even so that u/v plane becomes aligned.
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y / 2;
+ offset_x = uv_offset_x * 2;
+ offset_y = uv_offset_y * 2;
+
+ const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint8_t* u_plane =
+ src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+ const uint8_t* v_plane =
+ src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+ int res =
+ libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+ src.StrideV(), crop_width, crop_height, MutableDataY(),
+ StrideY(), MutableDataU(), StrideU(), MutableDataV(),
+ StrideV(), width(), height(), libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) {
+ const int crop_width =
+ height() > 0 ? std::min(src.width(), width() * src.height() / height())
+ : src.width();
+ const int crop_height =
+ width() > 0 ? std::min(src.height(), height() * src.width() / width())
+ : src.height();
+
+ CropAndScaleFrom(src, (src.width() - crop_width) / 2,
+ (src.height() - crop_height) / 2, crop_width, crop_height);
+}
+
+void I420Buffer::ScaleFrom(const I420BufferInterface& src) {
+ CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+} // namespace webrtc
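
The aspect-ratio-preserving center crop in CropAndScaleFrom(const I420BufferInterface&) above, restated as standalone arithmetic (not part of this patch) for the example of a 4:3 source cropped into a 16:9 destination:

#include <algorithm>
#include <cstdio>

int main() {
  const int dst_w = 640, dst_h = 360;  // 16:9 destination
  const int src_w = 640, src_h = 480;  // 4:3 source
  const int crop_w = std::min(src_w, dst_w * src_h / dst_h);  // 640
  const int crop_h = std::min(src_h, dst_h * src_w / dst_w);  // 360
  const int offset_x = (src_w - crop_w) / 2;                   // 0
  const int offset_y = (src_h - crop_h) / 2;                   // 60
  std::printf("crop %dx%d at (%d,%d)\n", crop_w, crop_h, offset_x, offset_y);
  return 0;
}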
diff --git a/third_party/libwebrtc/api/video/i420_buffer.h b/third_party/libwebrtc/api/video/i420_buffer.h
new file mode 100644
index 0000000000..b337489657
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i420_buffer.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I420_BUFFER_H_
+#define API_VIDEO_I420_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Plain I420 buffer in standard memory.
+class RTC_EXPORT I420Buffer : public I420BufferInterface {
+ public:
+ static rtc::scoped_refptr<I420Buffer> Create(int width, int height);
+ static rtc::scoped_refptr<I420Buffer> Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I420Buffer> Copy(const I420BufferInterface& buffer);
+ // Deprecated.
+ static rtc::scoped_refptr<I420Buffer> Copy(const VideoFrameBuffer& buffer) {
+ return Copy(*buffer.GetI420());
+ }
+
+ static rtc::scoped_refptr<I420Buffer> Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v);
+
+ // Returns a rotated copy of `src`.
+ static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src,
+ VideoRotation rotation);
+ // Deprecated.
+ static rtc::scoped_refptr<I420Buffer> Rotate(const VideoFrameBuffer& src,
+ VideoRotation rotation) {
+ return Rotate(*src.GetI420(), rotation);
+ }
+
+ // Sets the buffer to all black.
+ static void SetBlack(I420Buffer* buffer);
+
+  // Sets all three planes to all zeros. Used to work around quirks in memory
+  // checkers (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+  // ffmpeg (http://crbug.com/390941).
+  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
+  // issues are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint8_t* MutableDataY();
+ uint8_t* MutableDataU();
+ uint8_t* MutableDataV();
+
+ // Scale the cropped area of `src` to the size of `this` buffer, and
+ // write the result into `this`.
+ void CropAndScaleFrom(const I420BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ // The common case of a center crop, when needed to adjust the
+ // aspect ratio without distorting the image.
+ void CropAndScaleFrom(const I420BufferInterface& src);
+
+ // Scale all of `src` to the size of `this` buffer, with no cropping.
+ void ScaleFrom(const I420BufferInterface& src);
+
+ protected:
+ I420Buffer(int width, int height);
+ I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+
+ ~I420Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I420_BUFFER_H_
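
A minimal usage sketch (not part of this patch): allocate an I420 frame and paint it black (Y=0, U=V=128) before handing it to a sink.

#include "api/video/i420_buffer.h"

rtc::scoped_refptr<webrtc::I420Buffer> MakeBlackFrame(int width, int height) {
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(width, height);
  webrtc::I420Buffer::SetBlack(buffer.get());
  return buffer;
}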
diff --git a/third_party/libwebrtc/api/video/i422_buffer.cc b/third_party/libwebrtc/api/video/i422_buffer.cc
new file mode 100644
index 0000000000..fddc1b57fd
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i422_buffer.cc
@@ -0,0 +1,237 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i422_buffer.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+
+namespace webrtc {
+
+namespace {
+
+int I422DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return stride_y * height + stride_u * height + stride_v * height;
+}
+} // namespace
+
+I422Buffer::I422Buffer(int width, int height)
+ : I422Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}
+
+I422Buffer::I422Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint8_t*>(
+ AlignedMalloc(I422DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+ RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I422Buffer::~I422Buffer() {}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I422Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v) {
+ return rtc::make_ref_counted<I422Buffer>(width, height, stride_y, stride_u,
+ stride_v);
+}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(
+ const I422BufferInterface& source) {
+ return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
+ source.DataU(), source.StrideU(), source.DataV(),
+ source.StrideV());
+}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(
+ const I420BufferInterface& source) {
+ const int width = source.width();
+ const int height = source.height();
+ rtc::scoped_refptr<I422Buffer> buffer = Create(width, height);
+ int res = libyuv::I420ToI422(
+ source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+ source.DataV(), source.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), width, height);
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v) {
+ // Note: May use different strides than the input data.
+ rtc::scoped_refptr<I422Buffer> buffer = Create(width, height);
+ int res = libyuv::I422Copy(data_y, stride_y, data_u, stride_u, data_v,
+ stride_v, buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(),
+ buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), width, height);
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I422Buffer> I422Buffer::Rotate(
+ const I422BufferInterface& src,
+ VideoRotation rotation) {
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I422Buffer> buffer =
+ I422Buffer::Create(rotated_width, rotated_height);
+
+ int res = libyuv::I422Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
+ src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
+ buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), src.width(), src.height(),
+ static_cast<libyuv::RotationMode>(rotation));
+ RTC_DCHECK_EQ(res, 0);
+
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I422Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ int res = libyuv::I422ToI420(
+ DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
+ RTC_DCHECK_EQ(res, 0);
+
+ return i420_buffer;
+}
+
+void I422Buffer::InitializeData() {
+ memset(data_.get(), 0,
+ I422DataSize(height_, stride_y_, stride_u_, stride_v_));
+}
+
+int I422Buffer::width() const {
+ return width_;
+}
+
+int I422Buffer::height() const {
+ return height_;
+}
+
+const uint8_t* I422Buffer::DataY() const {
+ return data_.get();
+}
+const uint8_t* I422Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint8_t* I422Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * height_;
+}
+
+int I422Buffer::StrideY() const {
+ return stride_y_;
+}
+int I422Buffer::StrideU() const {
+ return stride_u_;
+}
+int I422Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint8_t* I422Buffer::MutableDataY() {
+ return const_cast<uint8_t*>(DataY());
+}
+uint8_t* I422Buffer::MutableDataU() {
+ return const_cast<uint8_t*>(DataU());
+}
+uint8_t* I422Buffer::MutableDataV() {
+ return const_cast<uint8_t*>(DataV());
+}
+
+void I422Buffer::CropAndScaleFrom(const I422BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+  // Make sure the horizontal offset is even so that the u/v planes stay
+  // aligned; 4:2:2 chroma is not subsampled vertically, so offset_y is kept.
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y;
+ offset_x = uv_offset_x * 2;
+
+ const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint8_t* u_plane =
+ src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+ const uint8_t* v_plane =
+ src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+
+ int res =
+ libyuv::I422Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+ src.StrideV(), crop_width, crop_height, MutableDataY(),
+ StrideY(), MutableDataU(), StrideU(), MutableDataV(),
+ StrideV(), width(), height(), libyuv::kFilterBox);
+ RTC_DCHECK_EQ(res, 0);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/i422_buffer.h b/third_party/libwebrtc/api/video/i422_buffer.h
new file mode 100644
index 0000000000..600b4ecea7
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i422_buffer.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I422_BUFFER_H_
+#define API_VIDEO_I422_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Plain I422 buffer in standard memory.
+class RTC_EXPORT I422Buffer : public I422BufferInterface {
+ public:
+ static rtc::scoped_refptr<I422Buffer> Create(int width, int height);
+ static rtc::scoped_refptr<I422Buffer> Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I422Buffer> Copy(const I422BufferInterface& buffer);
+  // Convert and put I420 buffer into a new buffer.
+ static rtc::scoped_refptr<I422Buffer> Copy(const I420BufferInterface& buffer);
+
+ static rtc::scoped_refptr<I422Buffer> Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v);
+
+ // Returns a rotated copy of `src`.
+ static rtc::scoped_refptr<I422Buffer> Rotate(const I422BufferInterface& src,
+ VideoRotation rotation);
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+ const I420BufferInterface* GetI420() const final { return nullptr; }
+
+ // Sets the buffer to all black.
+ static void SetBlack(I422Buffer* buffer);
+
+  // Sets all three planes to all zeros. Used to work around quirks in memory
+  // checkers (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+  // ffmpeg (http://crbug.com/390941).
+  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
+  // issues are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint8_t* MutableDataY();
+ uint8_t* MutableDataU();
+ uint8_t* MutableDataV();
+
+ // Scale the cropped area of `src` to the size of `this` buffer, and
+ // write the result into `this`.
+ void CropAndScaleFrom(const I422BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ // The common case of a center crop, when needed to adjust the
+ // aspect ratio without distorting the image.
+ void CropAndScaleFrom(const I422BufferInterface& src);
+
+ // Scale all of `src` to the size of `this` buffer, with no cropping.
+ void ScaleFrom(const I422BufferInterface& src);
+
+ protected:
+ I422Buffer(int width, int height);
+ I422Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+
+ ~I422Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I422_BUFFER_H_
diff --git a/third_party/libwebrtc/api/video/i444_buffer.cc b/third_party/libwebrtc/api/video/i444_buffer.cc
new file mode 100644
index 0000000000..98e892308f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i444_buffer.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i444_buffer.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning the pointer to 64 bytes for improved performance, e.g. to use SIMD.
+static const int kBufferAlignment = 64;
+
+namespace webrtc {
+
+namespace {
+
+int I444DataSize(int height, int stride_y, int stride_u, int stride_v) {
+ return stride_y * height + stride_u * height + stride_v * height;
+}
+
+} // namespace
+
+I444Buffer::I444Buffer(int width, int height)
+    : I444Buffer(width, height, width, width, width) {}
+
+I444Buffer::I444Buffer(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_u_(stride_u),
+ stride_v_(stride_v),
+ data_(static_cast<uint8_t*>(
+ AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+  RTC_DCHECK_GE(stride_u, width);
+  RTC_DCHECK_GE(stride_v, width);
+}
+
+I444Buffer::~I444Buffer() {}
+
+// static
+rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<I444Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v) {
+ return rtc::make_ref_counted<I444Buffer>(width, height, stride_y, stride_u,
+ stride_v);
+}
+
+// static
+rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(
+ const I444BufferInterface& source) {
+ return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
+ source.DataU(), source.StrideU(), source.DataV(),
+ source.StrideV());
+}
+
+// static
+rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v) {
+ // Note: May use different strides than the input data.
+ rtc::scoped_refptr<I444Buffer> buffer = Create(width, height);
+ RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v,
+ stride_v, buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(),
+ buffer->StrideU(), buffer->MutableDataV(),
+ buffer->StrideV(), width, height));
+ return buffer;
+}
+
+// static
+rtc::scoped_refptr<I444Buffer> I444Buffer::Rotate(
+ const I444BufferInterface& src,
+ VideoRotation rotation) {
+ RTC_CHECK(src.DataY());
+ RTC_CHECK(src.DataU());
+ RTC_CHECK(src.DataV());
+
+ int rotated_width = src.width();
+ int rotated_height = src.height();
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::I444Buffer> buffer =
+ I444Buffer::Create(rotated_width, rotated_height);
+
+ RTC_CHECK_EQ(0,
+ libyuv::I444Rotate(
+ src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
+ src.DataV(), src.StrideV(), buffer->MutableDataY(),
+ buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+ buffer->MutableDataV(), buffer->StrideV(), src.width(),
+ src.height(), static_cast<libyuv::RotationMode>(rotation)));
+
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I444Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+ width(), height());
+ return i420_buffer;
+}
+
+void I444Buffer::InitializeData() {
+ memset(data_.get(), 0,
+ I444DataSize(height_, stride_y_, stride_u_, stride_v_));
+}
+
+int I444Buffer::width() const {
+ return width_;
+}
+
+int I444Buffer::height() const {
+ return height_;
+}
+
+const uint8_t* I444Buffer::DataY() const {
+ return data_.get();
+}
+const uint8_t* I444Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint8_t* I444Buffer::DataV() const {
+  return data_.get() + stride_y_ * height_ + stride_u_ * height_;
+}
+
+int I444Buffer::StrideY() const {
+ return stride_y_;
+}
+int I444Buffer::StrideU() const {
+ return stride_u_;
+}
+int I444Buffer::StrideV() const {
+ return stride_v_;
+}
+
+uint8_t* I444Buffer::MutableDataY() {
+ return const_cast<uint8_t*>(DataY());
+}
+uint8_t* I444Buffer::MutableDataU() {
+ return const_cast<uint8_t*>(DataU());
+}
+uint8_t* I444Buffer::MutableDataV() {
+ return const_cast<uint8_t*>(DataV());
+}
+
+void I444Buffer::CropAndScaleFrom(const I444BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+ const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint8_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x;
+ const uint8_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x;
+ int res =
+ libyuv::I444Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+ src.StrideV(), crop_width, crop_height, MutableDataY(),
+ StrideY(), MutableDataU(), StrideU(), MutableDataV(),
+ StrideV(), width(), height(), libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/i444_buffer.h b/third_party/libwebrtc/api/video/i444_buffer.h
new file mode 100644
index 0000000000..f1e3f63114
--- /dev/null
+++ b/third_party/libwebrtc/api/video/i444_buffer.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I444_BUFFER_H_
+#define API_VIDEO_I444_BUFFER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Plain I444 buffer in standard memory.
+// I444 represents an image in YUV format without any chroma subsampling.
+// https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4
+class RTC_EXPORT I444Buffer : public I444BufferInterface {
+ public:
+ static rtc::scoped_refptr<I444Buffer> Create(int width, int height);
+ static rtc::scoped_refptr<I444Buffer> Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr<I444Buffer> Copy(const I444BufferInterface& buffer);
+
+ static rtc::scoped_refptr<I444Buffer> Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v);
+
+ // Returns a rotated copy of |src|.
+ static rtc::scoped_refptr<I444Buffer> Rotate(const I444BufferInterface& src,
+ VideoRotation rotation);
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+ const I420BufferInterface* GetI420() const final { return nullptr; }
+
+  // Sets all three planes to all zeros. Used to work around quirks in
+  // memory checkers
+  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+  // ffmpeg (http://crbug.com/390941).
+  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when
+  // those issues are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint8_t* MutableDataY();
+ uint8_t* MutableDataU();
+ uint8_t* MutableDataV();
+
+ // Scale the cropped area of |src| to the size of |this| buffer, and
+ // write the result into |this|.
+ void CropAndScaleFrom(const I444BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ protected:
+ I444Buffer(int width, int height);
+ I444Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+
+ ~I444Buffer() override;
+
+ private:
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_u_;
+ const int stride_v_;
+ const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_I444_BUFFER_H_
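Editorial note: a minimal usage sketch of the I444Buffer API declared above. The helper function name and the 640x480 target size are illustrative assumptions, not part of this patch.

#include "api/scoped_refptr.h"
#include "api/video/i420_buffer.h"
#include "api/video/i444_buffer.h"

// Downscale a 4:4:4 source into a fixed-size I444 buffer, then convert to
// I420 for consumers that expect 4:2:0 input. ExampleI444Usage is a
// hypothetical helper, not part of the WebRTC API.
void ExampleI444Usage(const webrtc::I444BufferInterface& src) {
  rtc::scoped_refptr<webrtc::I444Buffer> dst =
      webrtc::I444Buffer::Create(/*width=*/640, /*height=*/480);
  // Use the whole source frame (no cropping) and scale it to 640x480.
  dst->CropAndScaleFrom(src, /*offset_x=*/0, /*offset_y=*/0,
                        src.width(), src.height());
  rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = dst->ToI420();
  (void)i420;
}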
diff --git a/third_party/libwebrtc/api/video/nv12_buffer.cc b/third_party/libwebrtc/api/video/nv12_buffer.cc
new file mode 100644
index 0000000000..ca9dcd8677
--- /dev/null
+++ b/third_party/libwebrtc/api/video/nv12_buffer.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/nv12_buffer.h"
+
+#include "api/make_ref_counted.h"
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+namespace webrtc {
+
+namespace {
+
+static const int kBufferAlignment = 64;
+
+int NV12DataSize(int height, int stride_y, int stride_uv) {
+ return stride_y * height + stride_uv * ((height + 1) / 2);
+}
+
+} // namespace
+
+NV12Buffer::NV12Buffer(int width, int height)
+ : NV12Buffer(width, height, width, width + width % 2) {}
+
+NV12Buffer::NV12Buffer(int width, int height, int stride_y, int stride_uv)
+ : width_(width),
+ height_(height),
+ stride_y_(stride_y),
+ stride_uv_(stride_uv),
+ data_(static_cast<uint8_t*>(
+ AlignedMalloc(NV12DataSize(height_, stride_y_, stride_uv),
+ kBufferAlignment))) {
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+ RTC_DCHECK_GE(stride_y, width);
+ RTC_DCHECK_GE(stride_uv, (width + width % 2));
+}
+
+NV12Buffer::~NV12Buffer() = default;
+
+// static
+rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width, int height) {
+ return rtc::make_ref_counted<NV12Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width,
+ int height,
+ int stride_y,
+ int stride_uv) {
+ return rtc::make_ref_counted<NV12Buffer>(width, height, stride_y, stride_uv);
+}
+
+// static
+rtc::scoped_refptr<NV12Buffer> NV12Buffer::Copy(
+ const I420BufferInterface& i420_buffer) {
+ rtc::scoped_refptr<NV12Buffer> buffer =
+ NV12Buffer::Create(i420_buffer.width(), i420_buffer.height());
+ libyuv::I420ToNV12(
+ i420_buffer.DataY(), i420_buffer.StrideY(), i420_buffer.DataU(),
+ i420_buffer.StrideU(), i420_buffer.DataV(), i420_buffer.StrideV(),
+ buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataUV(),
+ buffer->StrideUV(), buffer->width(), buffer->height());
+ return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> NV12Buffer::ToI420() {
+ rtc::scoped_refptr<I420Buffer> i420_buffer =
+ I420Buffer::Create(width(), height());
+ libyuv::NV12ToI420(DataY(), StrideY(), DataUV(), StrideUV(),
+ i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+ i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+ i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+ width(), height());
+ return i420_buffer;
+}
+
+int NV12Buffer::width() const {
+ return width_;
+}
+int NV12Buffer::height() const {
+ return height_;
+}
+
+int NV12Buffer::StrideY() const {
+ return stride_y_;
+}
+int NV12Buffer::StrideUV() const {
+ return stride_uv_;
+}
+
+const uint8_t* NV12Buffer::DataY() const {
+ return data_.get();
+}
+
+const uint8_t* NV12Buffer::DataUV() const {
+ return data_.get() + UVOffset();
+}
+
+uint8_t* NV12Buffer::MutableDataY() {
+ return data_.get();
+}
+
+uint8_t* NV12Buffer::MutableDataUV() {
+ return data_.get() + UVOffset();
+}
+
+size_t NV12Buffer::UVOffset() const {
+ return stride_y_ * height_;
+}
+
+void NV12Buffer::InitializeData() {
+ memset(data_.get(), 0, NV12DataSize(height_, stride_y_, stride_uv_));
+}
+
+void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height) {
+ RTC_CHECK_LE(crop_width, src.width());
+ RTC_CHECK_LE(crop_height, src.height());
+ RTC_CHECK_LE(crop_width + offset_x, src.width());
+ RTC_CHECK_LE(crop_height + offset_y, src.height());
+ RTC_CHECK_GE(offset_x, 0);
+ RTC_CHECK_GE(offset_y, 0);
+
+ // Make sure offset is even so that u/v plane becomes aligned.
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y / 2;
+ offset_x = uv_offset_x * 2;
+ offset_y = uv_offset_y * 2;
+
+ const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint8_t* uv_plane =
+ src.DataUV() + src.StrideUV() * uv_offset_y + uv_offset_x * 2;
+
+ int res = libyuv::NV12Scale(y_plane, src.StrideY(), uv_plane, src.StrideUV(),
+ crop_width, crop_height, MutableDataY(),
+ StrideY(), MutableDataUV(), StrideUV(), width(),
+ height(), libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/nv12_buffer.h b/third_party/libwebrtc/api/video/nv12_buffer.h
new file mode 100644
index 0000000000..46a85f82e1
--- /dev/null
+++ b/third_party/libwebrtc/api/video/nv12_buffer.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_NV12_BUFFER_H_
+#define API_VIDEO_NV12_BUFFER_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/memory/aligned_malloc.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// NV12 is a biplanar encoding format, with full-resolution Y and
+// half-resolution interleaved UV. More information can be found at
+// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12.
+class RTC_EXPORT NV12Buffer : public NV12BufferInterface {
+ public:
+ static rtc::scoped_refptr<NV12Buffer> Create(int width, int height);
+ static rtc::scoped_refptr<NV12Buffer> Create(int width,
+ int height,
+ int stride_y,
+ int stride_uv);
+ static rtc::scoped_refptr<NV12Buffer> Copy(
+ const I420BufferInterface& i420_buffer);
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ int width() const override;
+ int height() const override;
+
+ int StrideY() const override;
+ int StrideUV() const override;
+
+ const uint8_t* DataY() const override;
+ const uint8_t* DataUV() const override;
+
+ uint8_t* MutableDataY();
+ uint8_t* MutableDataUV();
+
+  // Sets both planes (Y and interleaved UV) to all zeros. Used to work around
+  // quirks in memory checkers
+  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+  // ffmpeg (http://crbug.com/390941).
+  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when
+  // those issues are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ // Scale the cropped area of `src` to the size of `this` buffer, and
+ // write the result into `this`.
+ void CropAndScaleFrom(const NV12BufferInterface& src,
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height);
+
+ protected:
+ NV12Buffer(int width, int height);
+ NV12Buffer(int width, int height, int stride_y, int stride_uv);
+
+ ~NV12Buffer() override;
+
+ private:
+ size_t UVOffset() const;
+
+ const int width_;
+ const int height_;
+ const int stride_y_;
+ const int stride_uv_;
+ const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_NV12_BUFFER_H_
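Editorial note: a brief, illustrative sketch of how the NV12Buffer API above is typically driven. The helper name and the half-size target are assumptions for the example only.

#include "api/scoped_refptr.h"
#include "api/video/i420_buffer.h"
#include "api/video/nv12_buffer.h"

// Copy an I420 frame into the biplanar NV12 layout, downscale it by half,
// and convert back to I420. ExampleNv12Usage is a hypothetical helper.
void ExampleNv12Usage(const webrtc::I420BufferInterface& i420_src) {
  rtc::scoped_refptr<webrtc::NV12Buffer> nv12 =
      webrtc::NV12Buffer::Copy(i420_src);
  rtc::scoped_refptr<webrtc::NV12Buffer> half =
      webrtc::NV12Buffer::Create(nv12->width() / 2, nv12->height() / 2);
  // Crop offsets are rounded down to even values internally so that the
  // interleaved UV plane stays aligned with the Y plane.
  half->CropAndScaleFrom(*nv12, /*offset_x=*/0, /*offset_y=*/0,
                         nv12->width(), nv12->height());
  rtc::scoped_refptr<webrtc::I420BufferInterface> back = half->ToI420();
  (void)back;
}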
diff --git a/third_party/libwebrtc/api/video/recordable_encoded_frame.h b/third_party/libwebrtc/api/video/recordable_encoded_frame.h
new file mode 100644
index 0000000000..47ea23f119
--- /dev/null
+++ b/third_party/libwebrtc/api/video/recordable_encoded_frame.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
+#define API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
+
+#include "api/array_view.h"
+#include "api/scoped_refptr.h"
+#include "api/units/timestamp.h"
+#include "api/video/color_space.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_codec_type.h"
+
+namespace webrtc {
+
+// Interface for accessing recordable elements of an encoded frame.
+class RecordableEncodedFrame {
+ public:
+ // Encoded resolution in pixels
+  // TODO(bugs.webrtc.org/12114): remove in favor of Resolution.
+ struct EncodedResolution {
+ bool empty() const { return width == 0 && height == 0; }
+
+ unsigned width = 0;
+ unsigned height = 0;
+ };
+
+ virtual ~RecordableEncodedFrame() = default;
+
+ // Provides access to encoded data
+ virtual rtc::scoped_refptr<const EncodedImageBufferInterface> encoded_buffer()
+ const = 0;
+
+ // Optionally returns the colorspace of the encoded frame. This can differ
+ // from the eventually decoded frame's colorspace.
+ virtual absl::optional<webrtc::ColorSpace> color_space() const = 0;
+
+ // Returns the codec of the encoded frame
+ virtual VideoCodecType codec() const = 0;
+
+ // Returns whether the encoded frame is a key frame
+ virtual bool is_key_frame() const = 0;
+
+ // Returns the frame's encoded resolution. May be 0x0 if the frame
+ // doesn't contain resolution information
+ virtual EncodedResolution resolution() const = 0;
+
+ // Returns the computed render time
+ virtual Timestamp render_time() const = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
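Editorial note: a hedged sketch of how a frame recorder might consume this interface. CanStartRecordingWith is an assumed helper for illustration, not something defined by WebRTC.

#include "api/video/recordable_encoded_frame.h"

// A recording typically has to start on a key frame; frames without a
// payload or without resolution information are skipped as well.
bool CanStartRecordingWith(const webrtc::RecordableEncodedFrame& frame) {
  if (!frame.is_key_frame())
    return false;
  if (frame.resolution().empty())
    return false;
  return frame.encoded_buffer().get() != nullptr;
}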
diff --git a/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build b/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build
new file mode 100644
index 0000000000..af294ef3ca
--- /dev/null
+++ b/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("recordable_encoded_frame_gn")
diff --git a/third_party/libwebrtc/api/video/render_resolution.h b/third_party/libwebrtc/api/video/render_resolution.h
new file mode 100644
index 0000000000..fcf4f122d6
--- /dev/null
+++ b/third_party/libwebrtc/api/video/render_resolution.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_RENDER_RESOLUTION_H_
+#define API_VIDEO_RENDER_RESOLUTION_H_
+
+namespace webrtc {
+
+// TODO(bugs.webrtc.org/12114): remove in favor of Resolution.
+class RenderResolution {
+ public:
+ constexpr RenderResolution() = default;
+ constexpr RenderResolution(int width, int height)
+ : width_(width), height_(height) {}
+ RenderResolution(const RenderResolution&) = default;
+ RenderResolution& operator=(const RenderResolution&) = default;
+
+ friend bool operator==(const RenderResolution& lhs,
+ const RenderResolution& rhs) {
+ return lhs.width_ == rhs.width_ && lhs.height_ == rhs.height_;
+ }
+ friend bool operator!=(const RenderResolution& lhs,
+ const RenderResolution& rhs) {
+ return !(lhs == rhs);
+ }
+
+ constexpr bool Valid() const { return width_ > 0 && height_ > 0; }
+
+ constexpr int Width() const { return width_; }
+ constexpr int Height() const { return height_; }
+
+ private:
+ int width_ = 0;
+ int height_ = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_RENDER_RESOLUTION_H_
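Editorial note: a small illustrative helper showing the intended use of Valid(); the 640x360 fallback is an arbitrary assumption.

#include "api/video/render_resolution.h"

// A default-constructed RenderResolution is 0x0 and therefore not Valid().
webrtc::RenderResolution ResolutionOrDefault(
    webrtc::RenderResolution requested) {
  return requested.Valid() ? requested : webrtc::RenderResolution(640, 360);
}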
diff --git a/third_party/libwebrtc/api/video/render_resolution_gn/moz.build b/third_party/libwebrtc/api/video/render_resolution_gn/moz.build
new file mode 100644
index 0000000000..5f97a983f8
--- /dev/null
+++ b/third_party/libwebrtc/api/video/render_resolution_gn/moz.build
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("render_resolution_gn")
diff --git a/third_party/libwebrtc/api/video/resolution.h b/third_party/libwebrtc/api/video/resolution.h
new file mode 100644
index 0000000000..11ffef0b03
--- /dev/null
+++ b/third_party/libwebrtc/api/video/resolution.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_RESOLUTION_H_
+#define API_VIDEO_RESOLUTION_H_
+
+#include <utility>
+
+namespace webrtc {
+
+// A struct representing a video resolution in pixels.
+struct Resolution {
+ int width = 0;
+ int height = 0;
+
+ // Helper methods.
+ int PixelCount() const { return width * height; }
+ std::pair<int, int> ToPair() const { return std::make_pair(width, height); }
+};
+
+inline bool operator==(const Resolution& lhs, const Resolution& rhs) {
+ return lhs.width == rhs.width && lhs.height == rhs.height;
+}
+
+inline bool operator!=(const Resolution& lhs, const Resolution& rhs) {
+ return !(lhs == rhs);
+}
+
+} // namespace webrtc
+
+#endif // API_VIDEO_RESOLUTION_H_
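Editorial note: a minimal usage sketch for the struct above; the function is purely illustrative.

#include <utility>

#include "api/video/resolution.h"

// Populate a Resolution and exercise its helpers; operator== compares the
// width and height members.
void ExampleResolutionUsage() {
  webrtc::Resolution r;
  r.width = 1280;
  r.height = 720;
  int pixels = r.PixelCount();          // 1280 * 720 = 921600
  std::pair<int, int> wh = r.ToPair();  // {1280, 720}
  (void)pixels;
  (void)wh;
}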
diff --git a/third_party/libwebrtc/api/video/resolution_gn/moz.build b/third_party/libwebrtc/api/video/resolution_gn/moz.build
new file mode 100644
index 0000000000..deafa478dd
--- /dev/null
+++ b/third_party/libwebrtc/api/video/resolution_gn/moz.build
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("resolution_gn")
diff --git a/third_party/libwebrtc/api/video/rtp_video_frame_assembler.cc b/third_party/libwebrtc/api/video/rtp_video_frame_assembler.cc
new file mode 100644
index 0000000000..b748534f3f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/rtp_video_frame_assembler.cc
@@ -0,0 +1,340 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/rtp_video_frame_assembler.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h"
+#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
+#include "modules/video_coding/frame_object.h"
+#include "modules/video_coding/packet_buffer.h"
+#include "modules/video_coding/rtp_frame_reference_finder.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/sequence_number_unwrapper.h"
+
+namespace webrtc {
+namespace {
+std::unique_ptr<VideoRtpDepacketizer> CreateDepacketizer(
+ RtpVideoFrameAssembler::PayloadFormat payload_format) {
+ switch (payload_format) {
+ case RtpVideoFrameAssembler::kRaw:
+ return std::make_unique<VideoRtpDepacketizerRaw>();
+ case RtpVideoFrameAssembler::kH264:
+ return std::make_unique<VideoRtpDepacketizerH264>();
+ case RtpVideoFrameAssembler::kVp8:
+ return std::make_unique<VideoRtpDepacketizerVp8>();
+ case RtpVideoFrameAssembler::kVp9:
+ return std::make_unique<VideoRtpDepacketizerVp9>();
+ case RtpVideoFrameAssembler::kAv1:
+ return std::make_unique<VideoRtpDepacketizerAv1>();
+ case RtpVideoFrameAssembler::kGeneric:
+ return std::make_unique<VideoRtpDepacketizerGeneric>();
+ }
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+} // namespace
+
+class RtpVideoFrameAssembler::Impl {
+ public:
+ explicit Impl(std::unique_ptr<VideoRtpDepacketizer> depacketizer);
+ ~Impl() = default;
+
+ FrameVector InsertPacket(const RtpPacketReceived& packet);
+
+ private:
+ using RtpFrameVector =
+ absl::InlinedVector<std::unique_ptr<RtpFrameObject>, 3>;
+
+ RtpFrameVector AssembleFrames(
+ video_coding::PacketBuffer::InsertResult insert_result);
+ FrameVector FindReferences(RtpFrameVector frames);
+ FrameVector UpdateWithPadding(uint16_t seq_num);
+ bool ParseDependenciesDescriptorExtension(const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader& video_header);
+ bool ParseGenericDescriptorExtension(const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader& video_header);
+ void ClearOldData(uint16_t incoming_seq_num);
+
+ std::unique_ptr<FrameDependencyStructure> video_structure_;
+ SeqNumUnwrapper<uint16_t> frame_id_unwrapper_;
+ absl::optional<int64_t> video_structure_frame_id_;
+ std::unique_ptr<VideoRtpDepacketizer> depacketizer_;
+ video_coding::PacketBuffer packet_buffer_;
+ RtpFrameReferenceFinder reference_finder_;
+};
+
+RtpVideoFrameAssembler::Impl::Impl(
+ std::unique_ptr<VideoRtpDepacketizer> depacketizer)
+ : depacketizer_(std::move(depacketizer)),
+ packet_buffer_(/*start_buffer_size=*/2048, /*max_buffer_size=*/2048) {}
+
+RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::Impl::InsertPacket(
+ const RtpPacketReceived& rtp_packet) {
+ if (rtp_packet.payload_size() == 0) {
+ ClearOldData(rtp_packet.SequenceNumber());
+ return UpdateWithPadding(rtp_packet.SequenceNumber());
+ }
+
+ absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload =
+ depacketizer_->Parse(rtp_packet.PayloadBuffer());
+
+ if (parsed_payload == absl::nullopt) {
+ return {};
+ }
+
+ if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) {
+ if (!ParseDependenciesDescriptorExtension(rtp_packet,
+ parsed_payload->video_header)) {
+ return {};
+ }
+ } else if (rtp_packet.HasExtension<RtpGenericFrameDescriptorExtension00>()) {
+ if (!ParseGenericDescriptorExtension(rtp_packet,
+ parsed_payload->video_header)) {
+ return {};
+ }
+ }
+
+ parsed_payload->video_header.is_last_packet_in_frame |= rtp_packet.Marker();
+
+ auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
+ rtp_packet, parsed_payload->video_header);
+ packet->video_payload = std::move(parsed_payload->video_payload);
+
+ ClearOldData(rtp_packet.SequenceNumber());
+ return FindReferences(
+ AssembleFrames(packet_buffer_.InsertPacket(std::move(packet))));
+}
+
+void RtpVideoFrameAssembler::Impl::ClearOldData(uint16_t incoming_seq_num) {
+ constexpr uint16_t kOldSeqNumThreshold = 2000;
+ uint16_t old_seq_num = incoming_seq_num - kOldSeqNumThreshold;
+ packet_buffer_.ClearTo(old_seq_num);
+ reference_finder_.ClearTo(old_seq_num);
+}
+
+RtpVideoFrameAssembler::Impl::RtpFrameVector
+RtpVideoFrameAssembler::Impl::AssembleFrames(
+ video_coding::PacketBuffer::InsertResult insert_result) {
+ video_coding::PacketBuffer::Packet* first_packet = nullptr;
+ std::vector<rtc::ArrayView<const uint8_t>> payloads;
+ RtpFrameVector result;
+
+ for (auto& packet : insert_result.packets) {
+ if (packet->is_first_packet_in_frame()) {
+ first_packet = packet.get();
+ payloads.clear();
+ }
+ payloads.emplace_back(packet->video_payload);
+
+ if (packet->is_last_packet_in_frame()) {
+ rtc::scoped_refptr<EncodedImageBuffer> bitstream =
+ depacketizer_->AssembleFrame(payloads);
+
+ if (!bitstream) {
+ continue;
+ }
+
+ const video_coding::PacketBuffer::Packet& last_packet = *packet;
+ result.push_back(std::make_unique<RtpFrameObject>(
+ first_packet->seq_num, //
+ last_packet.seq_num, //
+ last_packet.marker_bit, //
+ /*times_nacked=*/0, //
+ /*first_packet_received_time=*/0, //
+ /*last_packet_received_time=*/0, //
+ first_packet->timestamp, //
+ /*ntp_time_ms=*/0, //
+ /*timing=*/VideoSendTiming(), //
+ first_packet->payload_type, //
+ first_packet->codec(), //
+ last_packet.video_header.rotation, //
+ last_packet.video_header.content_type, //
+ first_packet->video_header, //
+ last_packet.video_header.color_space, //
+ /*packet_infos=*/RtpPacketInfos(), //
+ std::move(bitstream)));
+ }
+ }
+
+ return result;
+}
+
+RtpVideoFrameAssembler::FrameVector
+RtpVideoFrameAssembler::Impl::FindReferences(RtpFrameVector frames) {
+ FrameVector res;
+ for (auto& frame : frames) {
+ auto complete_frames = reference_finder_.ManageFrame(std::move(frame));
+ for (std::unique_ptr<RtpFrameObject>& complete_frame : complete_frames) {
+ uint16_t rtp_seq_num_start = complete_frame->first_seq_num();
+ uint16_t rtp_seq_num_end = complete_frame->last_seq_num();
+ res.emplace_back(rtp_seq_num_start, rtp_seq_num_end,
+ std::move(complete_frame));
+ }
+ }
+ return res;
+}
+
+RtpVideoFrameAssembler::FrameVector
+RtpVideoFrameAssembler::Impl::UpdateWithPadding(uint16_t seq_num) {
+ auto res =
+ FindReferences(AssembleFrames(packet_buffer_.InsertPadding(seq_num)));
+ auto ref_finder_update = reference_finder_.PaddingReceived(seq_num);
+
+ for (std::unique_ptr<RtpFrameObject>& complete_frame : ref_finder_update) {
+ uint16_t rtp_seq_num_start = complete_frame->first_seq_num();
+ uint16_t rtp_seq_num_end = complete_frame->last_seq_num();
+ res.emplace_back(rtp_seq_num_start, rtp_seq_num_end,
+ std::move(complete_frame));
+ }
+
+ return res;
+}
+
+bool RtpVideoFrameAssembler::Impl::ParseDependenciesDescriptorExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader& video_header) {
+ webrtc::DependencyDescriptor dependency_descriptor;
+
+ if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>(
+ video_structure_.get(), &dependency_descriptor)) {
+ // Descriptor is either malformed, or the template referenced is not in
+ // the `video_structure_` currently being held.
+ // TODO(bugs.webrtc.org/10342): Improve packet reordering behavior.
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << " Failed to parse dependency descriptor.";
+ return false;
+ }
+
+ if (dependency_descriptor.attached_structure != nullptr &&
+ !dependency_descriptor.first_packet_in_frame) {
+ RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
+ << "Invalid dependency descriptor: structure "
+ "attached to non first packet of a frame.";
+ return false;
+ }
+
+ video_header.is_first_packet_in_frame =
+ dependency_descriptor.first_packet_in_frame;
+ video_header.is_last_packet_in_frame =
+ dependency_descriptor.last_packet_in_frame;
+
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number);
+ auto& generic_descriptor_info = video_header.generic.emplace();
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ dependency_descriptor.frame_dependencies.spatial_id;
+ generic_descriptor_info.temporal_index =
+ dependency_descriptor.frame_dependencies.temporal_id;
+
+ for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ for (int cdiff : dependency_descriptor.frame_dependencies.chain_diffs) {
+ generic_descriptor_info.chain_diffs.push_back(frame_id - cdiff);
+ }
+ generic_descriptor_info.decode_target_indications =
+ dependency_descriptor.frame_dependencies.decode_target_indications;
+ if (dependency_descriptor.resolution) {
+ video_header.width = dependency_descriptor.resolution->Width();
+ video_header.height = dependency_descriptor.resolution->Height();
+ }
+ if (dependency_descriptor.active_decode_targets_bitmask.has_value()) {
+ generic_descriptor_info.active_decode_targets =
+ *dependency_descriptor.active_decode_targets_bitmask;
+ }
+
+ // FrameDependencyStructure is sent in the dependency descriptor of the first
+ // packet of a key frame and is required to parse all subsequent packets until
+ // the next key frame.
+ if (dependency_descriptor.attached_structure) {
+ RTC_DCHECK(dependency_descriptor.first_packet_in_frame);
+ if (video_structure_frame_id_ > frame_id) {
+ RTC_LOG(LS_WARNING)
+ << "Arrived key frame with id " << frame_id << " and structure id "
+ << dependency_descriptor.attached_structure->structure_id
+ << " is older than the latest received key frame with id "
+ << *video_structure_frame_id_ << " and structure id "
+ << video_structure_->structure_id;
+ return false;
+ }
+ video_structure_ = std::move(dependency_descriptor.attached_structure);
+ video_structure_frame_id_ = frame_id;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ } else {
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ }
+ return true;
+}
+
+bool RtpVideoFrameAssembler::Impl::ParseGenericDescriptorExtension(
+ const RtpPacketReceived& rtp_packet,
+ RTPVideoHeader& video_header) {
+ RtpGenericFrameDescriptor generic_frame_descriptor;
+ if (!rtp_packet.GetExtension<RtpGenericFrameDescriptorExtension00>(
+ &generic_frame_descriptor)) {
+ return false;
+ }
+
+ video_header.is_first_packet_in_frame =
+ generic_frame_descriptor.FirstPacketInSubFrame();
+ video_header.is_last_packet_in_frame =
+ generic_frame_descriptor.LastPacketInSubFrame();
+
+ if (generic_frame_descriptor.FirstPacketInSubFrame()) {
+ video_header.frame_type =
+ generic_frame_descriptor.FrameDependenciesDiffs().empty()
+ ? VideoFrameType::kVideoFrameKey
+ : VideoFrameType::kVideoFrameDelta;
+
+ auto& generic_descriptor_info = video_header.generic.emplace();
+ int64_t frame_id =
+ frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId());
+ generic_descriptor_info.frame_id = frame_id;
+ generic_descriptor_info.spatial_index =
+ generic_frame_descriptor.SpatialLayer();
+ generic_descriptor_info.temporal_index =
+ generic_frame_descriptor.TemporalLayer();
+ for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) {
+ generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
+ }
+ }
+ video_header.width = generic_frame_descriptor.Width();
+ video_header.height = generic_frame_descriptor.Height();
+ return true;
+}
+
+RtpVideoFrameAssembler::RtpVideoFrameAssembler(PayloadFormat payload_format)
+ : impl_(std::make_unique<Impl>(CreateDepacketizer(payload_format))) {}
+
+RtpVideoFrameAssembler::~RtpVideoFrameAssembler() = default;
+
+RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::InsertPacket(
+ const RtpPacketReceived& packet) {
+ return impl_->InsertPacket(packet);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/rtp_video_frame_assembler.h b/third_party/libwebrtc/api/video/rtp_video_frame_assembler.h
new file mode 100644
index 0000000000..83162cb818
--- /dev/null
+++ b/third_party/libwebrtc/api/video/rtp_video_frame_assembler.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
+#define API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
+
+#include <cstdint>
+#include <memory>
+#include <utility>
+
+#include "absl/container/inlined_vector.h"
+#include "api/video/encoded_frame.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+
+namespace webrtc {
+// The RtpVideoFrameAssembler takes RtpPacketReceived packets and assembles
+// them into complete frames. A frame is considered complete when all of its
+// packets have been received, the bitstream data has been successfully
+// extracted, an ID has been assigned, and all dependencies are known.
+// Frame IDs are strictly monotonic in decode order; dependencies are
+// expressed as frame IDs.
+class RtpVideoFrameAssembler {
+ public:
+  // The RtpVideoFrameAssembler should ideally return "RTP frames", but there
+  // is currently no good class for that purpose, so for now it returns an
+  // EncodedFrame bundled with some minimal RTP information.
+ class AssembledFrame {
+ public:
+ AssembledFrame(uint16_t rtp_seq_num_start,
+ uint16_t rtp_seq_num_end,
+ std::unique_ptr<EncodedFrame> frame)
+ : rtp_seq_num_start_(rtp_seq_num_start),
+ rtp_seq_num_end_(rtp_seq_num_end),
+ frame_(std::move(frame)) {}
+
+ uint16_t RtpSeqNumStart() const { return rtp_seq_num_start_; }
+ uint16_t RtpSeqNumEnd() const { return rtp_seq_num_end_; }
+ std::unique_ptr<EncodedFrame> ExtractFrame() { return std::move(frame_); }
+
+ private:
+ uint16_t rtp_seq_num_start_;
+ uint16_t rtp_seq_num_end_;
+ std::unique_ptr<EncodedFrame> frame_;
+ };
+
+  // FrameVector is a vector-like container of AssembledFrame objects.
+  // The exact container type may change without notice.
+ using FrameVector = absl::InlinedVector<AssembledFrame, 3>;
+ enum PayloadFormat { kRaw, kH264, kVp8, kVp9, kAv1, kGeneric };
+
+ explicit RtpVideoFrameAssembler(PayloadFormat payload_format);
+ RtpVideoFrameAssembler(const RtpVideoFrameAssembler& other) = delete;
+ RtpVideoFrameAssembler& operator=(const RtpVideoFrameAssembler& other) =
+ delete;
+ ~RtpVideoFrameAssembler();
+
+  // Typically, inserting a packet completes zero or one frame. When RTP
+  // packets are inserted out of order, a single packet can sometimes complete
+  // multiple frames, hence the `FrameVector` return type.
+ FrameVector InsertPacket(const RtpPacketReceived& packet);
+
+ private:
+ class Impl;
+ std::unique_ptr<Impl> impl_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
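Editorial note: to make the intended call pattern concrete, a hedged sketch of feeding packets from a VP8 stream into the assembler. OnRtpPacket and the deliver callback are assumptions for the example, not part of the API.

#include <functional>
#include <memory>

#include "api/video/encoded_frame.h"
#include "api/video/rtp_video_frame_assembler.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"

// One assembler is created per video stream with that stream's payload
// format, e.g.
//   webrtc::RtpVideoFrameAssembler assembler(
//       webrtc::RtpVideoFrameAssembler::kVp8);
void OnRtpPacket(
    webrtc::RtpVideoFrameAssembler& assembler,
    const webrtc::RtpPacketReceived& packet,
    std::function<void(std::unique_ptr<webrtc::EncodedFrame>)> deliver) {
  webrtc::RtpVideoFrameAssembler::FrameVector frames =
      assembler.InsertPacket(packet);
  // Out-of-order insertion can complete several frames from a single packet.
  for (auto& assembled : frames) {
    deliver(assembled.ExtractFrame());
  }
}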
diff --git a/third_party/libwebrtc/api/video/rtp_video_frame_assembler_unittests.cc b/third_party/libwebrtc/api/video/rtp_video_frame_assembler_unittests.cc
new file mode 100644
index 0000000000..82defb8399
--- /dev/null
+++ b/third_party/libwebrtc/api/video/rtp_video_frame_assembler_unittests.cc
@@ -0,0 +1,583 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "api/array_view.h"
+#include "api/video/rtp_video_frame_assembler.h"
+#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::Matches;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
+using ::testing::UnorderedElementsAreArray;
+using PayloadFormat = RtpVideoFrameAssembler::PayloadFormat;
+
+class PacketBuilder {
+ public:
+ explicit PacketBuilder(PayloadFormat format)
+ : format_(format), packet_to_send_(&extension_manager_) {}
+
+ PacketBuilder& WithSeqNum(uint16_t seq_num) {
+ seq_num_ = seq_num;
+ return *this;
+ }
+
+ PacketBuilder& WithPayload(rtc::ArrayView<const uint8_t> payload) {
+ payload_.assign(payload.begin(), payload.end());
+ return *this;
+ }
+
+ PacketBuilder& WithVideoHeader(const RTPVideoHeader& video_header) {
+ video_header_ = video_header;
+ return *this;
+ }
+
+ template <typename T, typename... Args>
+ PacketBuilder& WithExtension(int id, const Args&... args) {
+ extension_manager_.Register<T>(id);
+ packet_to_send_.IdentifyExtensions(extension_manager_);
+ packet_to_send_.SetExtension<T>(std::forward<const Args>(args)...);
+ return *this;
+ }
+
+ RtpPacketReceived Build() {
+ auto packetizer =
+ RtpPacketizer::Create(GetVideoCodecType(), payload_, {}, video_header_);
+ packetizer->NextPacket(&packet_to_send_);
+ packet_to_send_.SetSequenceNumber(seq_num_);
+
+ RtpPacketReceived received(&extension_manager_);
+ received.Parse(packet_to_send_.Buffer());
+ return received;
+ }
+
+ private:
+ absl::optional<VideoCodecType> GetVideoCodecType() {
+ switch (format_) {
+ case PayloadFormat::kRaw: {
+ return absl::nullopt;
+ }
+ case PayloadFormat::kH264: {
+ return kVideoCodecH264;
+ }
+ case PayloadFormat::kVp8: {
+ return kVideoCodecVP8;
+ }
+ case PayloadFormat::kVp9: {
+ return kVideoCodecVP9;
+ }
+ case PayloadFormat::kAv1: {
+ return kVideoCodecAV1;
+ }
+ case PayloadFormat::kGeneric: {
+ return kVideoCodecGeneric;
+ }
+ }
+ RTC_DCHECK_NOTREACHED();
+ return absl::nullopt;
+ }
+
+ const RtpVideoFrameAssembler::PayloadFormat format_;
+ uint16_t seq_num_ = 0;
+ std::vector<uint8_t> payload_;
+ RTPVideoHeader video_header_;
+ RtpPacketReceived::ExtensionManager extension_manager_;
+ RtpPacketToSend packet_to_send_;
+};
+
+RtpPacketReceived PaddingPacket(uint16_t seq_num) {
+ RtpPacketReceived padding_packet;
+ padding_packet.SetSequenceNumber(seq_num);
+ padding_packet.SetPadding(224);
+ return padding_packet;
+}
+
+void AppendFrames(RtpVideoFrameAssembler::FrameVector from,
+ RtpVideoFrameAssembler::FrameVector& to) {
+ to.insert(to.end(), std::make_move_iterator(from.begin()),
+ std::make_move_iterator(from.end()));
+}
+
+rtc::ArrayView<int64_t> References(const std::unique_ptr<EncodedFrame>& frame) {
+ return rtc::MakeArrayView(frame->references, frame->num_references);
+}
+
+rtc::ArrayView<uint8_t> Payload(const std::unique_ptr<EncodedFrame>& frame) {
+ return rtc::ArrayView<uint8_t>(*frame->GetEncodedData());
+}
+
+TEST(RtpVideoFrameAssembler, Vp8Packetization) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kVp8);
+
+  // When sending VP8 over RTP, parts of the payload are actually inspected at
+  // the RTP level. It just so happens that the initial 'V' sets the keyframe
+  // bit (0x01) to the correct value.
+ uint8_t kKeyframePayload[] = "Vp8Keyframe";
+ ASSERT_EQ(kKeyframePayload[0] & 0x01, 0);
+
+ uint8_t kDeltaframePayload[] = "SomeFrame";
+ ASSERT_EQ(kDeltaframePayload[0] & 0x01, 1);
+
+ RtpVideoFrameAssembler::FrameVector frames;
+
+ RTPVideoHeader video_header;
+ auto& vp8_header =
+ video_header.video_type_header.emplace<RTPVideoHeaderVP8>();
+
+ vp8_header.pictureId = 10;
+ vp8_header.tl0PicIdx = 0;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp8)
+ .WithPayload(kKeyframePayload)
+ .WithVideoHeader(video_header)
+ .Build()),
+ frames);
+
+ vp8_header.pictureId = 11;
+ vp8_header.tl0PicIdx = 1;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp8)
+ .WithPayload(kDeltaframePayload)
+ .WithVideoHeader(video_header)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(10));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kKeyframePayload));
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(11));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(10));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kDeltaframePayload));
+}
+
+TEST(RtpVideoFrameAssembler, Vp9Packetization) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kVp9);
+ RtpVideoFrameAssembler::FrameVector frames;
+
+ uint8_t kPayload[] = "SomePayload";
+
+ RTPVideoHeader video_header;
+ auto& vp9_header =
+ video_header.video_type_header.emplace<RTPVideoHeaderVP9>();
+ vp9_header.InitRTPVideoHeaderVP9();
+
+ vp9_header.picture_id = 10;
+ vp9_header.tl0_pic_idx = 0;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp9)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .Build()),
+ frames);
+
+ vp9_header.picture_id = 11;
+ vp9_header.tl0_pic_idx = 1;
+ vp9_header.inter_pic_predicted = true;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp9)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(10));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(11));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(10));
+}
+
+TEST(RtpVideoFrameAssembler, Av1Packetization) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kAv1);
+ RtpVideoFrameAssembler::FrameVector frames;
+
+ auto kKeyframePayload =
+ BuildAv1Frame({Av1Obu(kAv1ObuTypeSequenceHeader).WithPayload({1, 2, 3}),
+ Av1Obu(kAv1ObuTypeFrame).WithPayload({4, 5, 6})});
+
+ auto kDeltaframePayload =
+ BuildAv1Frame({Av1Obu(kAv1ObuTypeFrame).WithPayload({7, 8, 9})});
+
+ RTPVideoHeader video_header;
+
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kAv1)
+ .WithPayload(kKeyframePayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(20)
+ .Build()),
+ frames);
+
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kAv1)
+ .WithPayload(kDeltaframePayload)
+ .WithSeqNum(21)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(20));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kKeyframePayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(21));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kDeltaframePayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(20));
+}
+
+TEST(RtpVideoFrameAssembler, RawPacketizationDependencyDescriptorExtension) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kRaw);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] = "SomePayload";
+
+ FrameDependencyStructure dependency_structure;
+ dependency_structure.num_decode_targets = 1;
+ dependency_structure.num_chains = 1;
+ dependency_structure.decode_target_protected_by_chain.push_back(0);
+ dependency_structure.templates.push_back(
+ FrameDependencyTemplate().S(0).T(0).Dtis("S").ChainDiffs({0}));
+ dependency_structure.templates.push_back(
+ FrameDependencyTemplate().S(0).T(0).Dtis("S").ChainDiffs({10}).FrameDiffs(
+ {10}));
+
+ DependencyDescriptor dependency_descriptor;
+
+ dependency_descriptor.frame_number = 10;
+ dependency_descriptor.frame_dependencies = dependency_structure.templates[0];
+ dependency_descriptor.attached_structure =
+ std::make_unique<FrameDependencyStructure>(dependency_structure);
+ AppendFrames(assembler.InsertPacket(
+ PacketBuilder(PayloadFormat::kRaw)
+ .WithPayload(kPayload)
+ .WithExtension<RtpDependencyDescriptorExtension>(
+ 1, dependency_structure, dependency_descriptor)
+ .Build()),
+ frames);
+
+ dependency_descriptor.frame_number = 20;
+ dependency_descriptor.frame_dependencies = dependency_structure.templates[1];
+ dependency_descriptor.attached_structure.reset();
+ AppendFrames(assembler.InsertPacket(
+ PacketBuilder(PayloadFormat::kRaw)
+ .WithPayload(kPayload)
+ .WithExtension<RtpDependencyDescriptorExtension>(
+ 1, dependency_structure, dependency_descriptor)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(10));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(20));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(10));
+}
+
+TEST(RtpVideoFrameAssembler, RawPacketizationGenericDescriptor00Extension) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kRaw);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] = "SomePayload";
+
+ RtpGenericFrameDescriptor generic;
+
+ generic.SetFirstPacketInSubFrame(true);
+ generic.SetLastPacketInSubFrame(true);
+ generic.SetFrameId(100);
+ AppendFrames(
+ assembler.InsertPacket(
+ PacketBuilder(PayloadFormat::kRaw)
+ .WithPayload(kPayload)
+ .WithExtension<RtpGenericFrameDescriptorExtension00>(1, generic)
+ .Build()),
+ frames);
+
+ generic.SetFrameId(102);
+ generic.AddFrameDependencyDiff(2);
+ AppendFrames(
+ assembler.InsertPacket(
+ PacketBuilder(PayloadFormat::kRaw)
+ .WithPayload(kPayload)
+ .WithExtension<RtpGenericFrameDescriptorExtension00>(1, generic)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(100));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(102));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(100));
+}
+
+TEST(RtpVideoFrameAssembler, RawPacketizationGenericPayloadDescriptor) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] = "SomePayload";
+
+ RTPVideoHeader video_header;
+
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(123)
+ .Build()),
+ frames);
+
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(124)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(123));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(124));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(123));
+}
+
+TEST(RtpVideoFrameAssembler, Padding) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] = "SomePayload";
+
+ RTPVideoHeader video_header;
+
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(123)
+ .Build()),
+ frames);
+
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(125)
+ .Build()),
+ frames);
+
+ ASSERT_THAT(frames, SizeIs(1));
+ auto first_frame = frames[0].ExtractFrame();
+ EXPECT_THAT(first_frame->Id(), Eq(123));
+ EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(first_frame), IsEmpty());
+
+ AppendFrames(assembler.InsertPacket(PaddingPacket(/*seq_num=*/124)), frames);
+
+ ASSERT_THAT(frames, SizeIs(2));
+ auto second_frame = frames[1].ExtractFrame();
+ EXPECT_THAT(second_frame->Id(), Eq(125));
+ EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
+ EXPECT_THAT(References(second_frame), UnorderedElementsAre(123));
+}
+
+TEST(RtpVideoFrameAssembler, ClearOldPackets) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+
+  // If we don't have a payload, the packet will be counted as a padding packet.
+ uint8_t kPayload[] = "DontCare";
+
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(0)
+ .Build()),
+ SizeIs(1));
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(2000)
+ .Build()),
+ SizeIs(1));
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(0)
+ .Build()),
+ SizeIs(0));
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(1)
+ .Build()),
+ SizeIs(1));
+}
+
+TEST(RtpVideoFrameAssembler, ClearOldPacketsWithPadding) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+ uint8_t kPayload[] = "DontCare";
+
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(0)
+ .Build()),
+ SizeIs(1));
+
+ EXPECT_THAT(assembler.InsertPacket(PaddingPacket(/*seq_num=*/2000)),
+ SizeIs(0));
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(0)
+ .Build()),
+ SizeIs(0));
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(1)
+ .Build()),
+ SizeIs(1));
+}
+
+TEST(RtpVideoFrameAssembler, SeqNumStartAndSeqNumEndSet) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] =
+ "Some payload that will get split into two when packetized.";
+
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = sizeof(kPayload) - 1;
+
+ auto packetizer =
+ RtpPacketizer::Create(kVideoCodecGeneric, kPayload, limits, video_header);
+ ASSERT_THAT(packetizer->NumPackets(), Eq(2U));
+
+ RtpPacketReceived::ExtensionManager extension_manager;
+ {
+ RtpPacketToSend send_packet(&extension_manager);
+ packetizer->NextPacket(&send_packet);
+ send_packet.SetSequenceNumber(123);
+ RtpPacketReceived received_packet(&extension_manager);
+ received_packet.Parse(send_packet.Buffer());
+ assembler.InsertPacket(received_packet);
+ }
+
+ {
+ RtpPacketToSend send_packet(&extension_manager);
+ packetizer->NextPacket(&send_packet);
+ send_packet.SetSequenceNumber(124);
+ RtpPacketReceived received_packet(&extension_manager);
+ received_packet.Parse(send_packet.Buffer());
+ AppendFrames(assembler.InsertPacket(received_packet), frames);
+ }
+
+ ASSERT_THAT(frames, SizeIs(1));
+ EXPECT_THAT(frames[0].RtpSeqNumStart(), Eq(123));
+ EXPECT_THAT(frames[0].RtpSeqNumEnd(), Eq(124));
+}
+
+TEST(RtpVideoFrameAssembler, SeqNumStartAndSeqNumEndSetWhenPaddingReceived) {
+ RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
+ RtpVideoFrameAssembler::FrameVector frames;
+ uint8_t kPayload[] =
+ "Some payload that will get split into two when packetized.";
+
+ RTPVideoHeader video_header;
+ video_header.frame_type = VideoFrameType::kVideoFrameKey;
+
+ EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
+ .WithPayload(kPayload)
+ .WithVideoHeader(video_header)
+ .WithSeqNum(121)
+ .Build()),
+ SizeIs(1));
+
+ video_header.frame_type = VideoFrameType::kVideoFrameDelta;
+ RtpPacketReceived::ExtensionManager extension_manager;
+ RtpPacketizer::PayloadSizeLimits limits;
+ limits.max_payload_len = sizeof(kPayload) - 1;
+
+ auto packetizer =
+ RtpPacketizer::Create(kVideoCodecGeneric, kPayload, limits, video_header);
+ ASSERT_THAT(packetizer->NumPackets(), Eq(2U));
+
+ {
+ RtpPacketToSend send_packet(&extension_manager);
+ packetizer->NextPacket(&send_packet);
+ send_packet.SetSequenceNumber(123);
+ RtpPacketReceived received_packet(&extension_manager);
+ received_packet.Parse(send_packet.Buffer());
+ assembler.InsertPacket(received_packet);
+ }
+
+ {
+ RtpPacketToSend send_packet(&extension_manager);
+ packetizer->NextPacket(&send_packet);
+ send_packet.SetSequenceNumber(124);
+ RtpPacketReceived received_packet(&extension_manager);
+ received_packet.Parse(send_packet.Buffer());
+ assembler.InsertPacket(received_packet);
+ }
+
+ AppendFrames(assembler.InsertPacket(PaddingPacket(/*seq_num=*/122)), frames);
+
+ ASSERT_THAT(frames, SizeIs(1));
+ EXPECT_THAT(frames[0].RtpSeqNumStart(), Eq(123));
+ EXPECT_THAT(frames[0].RtpSeqNumEnd(), Eq(124));
+}
+
+} // namespace
+} // namespace webrtc
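The Vp8Packetization test above relies on the first VP8 payload byte; a minimal sketch of that check, as a hypothetical helper that is not part of the file (per RFC 7741 the least significant bit of the first VP8 payload header byte is the inverse key frame flag P, so 0 means key frame):

// Sketch: the key-frame property the test's ASSERTs verify on its payloads.
bool LooksLikeVp8KeyFrame(rtc::ArrayView<const uint8_t> vp8_payload) {
  return !vp8_payload.empty() && (vp8_payload[0] & 0x01) == 0;
}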
diff --git a/third_party/libwebrtc/api/video/test/BUILD.gn b/third_party/libwebrtc/api/video/test/BUILD.gn
new file mode 100644
index 0000000000..60ec4b852f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+rtc_library("rtc_api_video_unittests") {
+ testonly = true
+ sources = [
+ "color_space_unittest.cc",
+ "i210_buffer_unittest.cc",
+ "i410_buffer_unittest.cc",
+ "i422_buffer_unittest.cc",
+ "i444_buffer_unittest.cc",
+ "nv12_buffer_unittest.cc",
+ "video_adaptation_counters_unittest.cc",
+ "video_bitrate_allocation_unittest.cc",
+ ]
+ deps = [
+ "..:video_adaptation",
+ "..:video_bitrate_allocation",
+ "..:video_frame",
+ "..:video_frame_i010",
+ "..:video_rtp_headers",
+ "../../../test:frame_utils",
+ "../../../test:test_support",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_source_set("mock_recordable_encoded_frame") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [ "mock_recordable_encoded_frame.h" ]
+
+ deps = [
+ "..:recordable_encoded_frame",
+ "../../../test:test_support",
+ ]
+}
+
+rtc_source_set("video_frame_matchers") {
+ testonly = true
+ visibility = [ "*" ]
+ sources = [ "video_frame_matchers.h" ]
+
+ deps = [
+ "..:video_frame",
+ "../..:rtp_packet_info",
+ "../../../test:test_support",
+ ]
+}
diff --git a/third_party/libwebrtc/api/video/test/color_space_unittest.cc b/third_party/libwebrtc/api/video/test/color_space_unittest.cc
new file mode 100644
index 0000000000..1d8b3a87f6
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/color_space_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/color_space.h"
+
+#include <stdint.h>
+
+#include "test/gtest.h"
+
+namespace webrtc {
+TEST(ColorSpace, TestSettingPrimariesFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_primaries_from_uint8(
+ static_cast<uint8_t>(ColorSpace::PrimaryID::kBT470BG)));
+ EXPECT_EQ(ColorSpace::PrimaryID::kBT470BG, color_space.primaries());
+ EXPECT_FALSE(color_space.set_primaries_from_uint8(3));
+ EXPECT_FALSE(color_space.set_primaries_from_uint8(23));
+ EXPECT_FALSE(color_space.set_primaries_from_uint8(64));
+}
+
+TEST(ColorSpace, TestSettingTransferFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_transfer_from_uint8(
+ static_cast<uint8_t>(ColorSpace::TransferID::kBT2020_10)));
+ EXPECT_EQ(ColorSpace::TransferID::kBT2020_10, color_space.transfer());
+ EXPECT_FALSE(color_space.set_transfer_from_uint8(3));
+ EXPECT_FALSE(color_space.set_transfer_from_uint8(19));
+ EXPECT_FALSE(color_space.set_transfer_from_uint8(128));
+}
+
+TEST(ColorSpace, TestSettingMatrixFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_matrix_from_uint8(
+ static_cast<uint8_t>(ColorSpace::MatrixID::kCDNCLS)));
+ EXPECT_EQ(ColorSpace::MatrixID::kCDNCLS, color_space.matrix());
+ EXPECT_FALSE(color_space.set_matrix_from_uint8(3));
+ EXPECT_FALSE(color_space.set_matrix_from_uint8(15));
+ EXPECT_FALSE(color_space.set_matrix_from_uint8(255));
+}
+
+TEST(ColorSpace, TestSettingRangeFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_range_from_uint8(
+ static_cast<uint8_t>(ColorSpace::RangeID::kFull)));
+ EXPECT_EQ(ColorSpace::RangeID::kFull, color_space.range());
+ EXPECT_FALSE(color_space.set_range_from_uint8(4));
+}
+
+TEST(ColorSpace, TestSettingChromaSitingHorizontalFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_chroma_siting_horizontal_from_uint8(
+ static_cast<uint8_t>(ColorSpace::ChromaSiting::kCollocated)));
+ EXPECT_EQ(ColorSpace::ChromaSiting::kCollocated,
+ color_space.chroma_siting_horizontal());
+ EXPECT_FALSE(color_space.set_chroma_siting_horizontal_from_uint8(3));
+}
+
+TEST(ColorSpace, TestSettingChromaSitingVerticalFromUint8) {
+ ColorSpace color_space;
+ EXPECT_TRUE(color_space.set_chroma_siting_vertical_from_uint8(
+ static_cast<uint8_t>(ColorSpace::ChromaSiting::kHalf)));
+ EXPECT_EQ(ColorSpace::ChromaSiting::kHalf,
+ color_space.chroma_siting_vertical());
+ EXPECT_FALSE(color_space.set_chroma_siting_vertical_from_uint8(3));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/i210_buffer_unittest.cc b/third_party/libwebrtc/api/video/test/i210_buffer_unittest.cc
new file mode 100644
index 0000000000..aaa231b6d2
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/i210_buffer_unittest.cc
@@ -0,0 +1,126 @@
+
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i210_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "test/frame_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+
+int GetY(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
+ return buf->DataY()[row * buf->StrideY() + col];
+}
+
+int GetU(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
+ return buf->DataU()[row * buf->StrideU() + col];
+}
+
+int GetV(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
+ return buf->DataV()[row * buf->StrideV() + col];
+}
+
+void FillI210Buffer(rtc::scoped_refptr<I210Buffer> buf) {
+ const uint16_t Y = 4;
+ const uint16_t U = 8;
+ const uint16_t V = 16;
+ for (int row = 0; row < buf->height(); ++row) {
+ for (int col = 0; col < buf->width(); ++col) {
+ buf->MutableDataY()[row * buf->StrideY() + col] = Y;
+ }
+ }
+ for (int row = 0; row < buf->ChromaHeight(); ++row) {
+ for (int col = 0; col < buf->ChromaWidth(); ++col) {
+ buf->MutableDataU()[row * buf->StrideU() + col] = U;
+ buf->MutableDataV()[row * buf->StrideV() + col] = V;
+ }
+ }
+}
+
+} // namespace
+
+TEST(I210BufferTest, InitialData) {
+ constexpr int stride = 3;
+ constexpr int halfstride = (stride + 1) >> 1;
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
+ EXPECT_EQ(width, i210_buffer->width());
+ EXPECT_EQ(height, i210_buffer->height());
+ EXPECT_EQ(stride, i210_buffer->StrideY());
+ EXPECT_EQ(halfstride, i210_buffer->StrideU());
+ EXPECT_EQ(halfstride, i210_buffer->StrideV());
+ EXPECT_EQ(halfwidth, i210_buffer->ChromaWidth());
+ EXPECT_EQ(height, i210_buffer->ChromaHeight());
+}
+
+TEST(I210BufferTest, ReadPixels) {
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
+ // Y = 4, U = 8, V = 16.
+ FillI210Buffer(i210_buffer);
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(4, GetY(i210_buffer, col, row));
+ }
+ }
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < halfwidth; col++) {
+ EXPECT_EQ(8, GetU(i210_buffer, col, row));
+ EXPECT_EQ(16, GetV(i210_buffer, col, row));
+ }
+ }
+}
+
+TEST(I210BufferTest, ToI420) {
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+ constexpr int size = width * height;
+ constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
+ memset(reference->MutableDataY(), 1, size);
+ memset(reference->MutableDataU(), 2, quartersize);
+ memset(reference->MutableDataV(), 4, quartersize);
+
+ rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
+ // Y = 4, U = 8, V = 16.
+ FillI210Buffer(i210_buffer);
+
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(4, GetY(i210_buffer, col, row));
+ }
+ }
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < halfwidth; col++) {
+ EXPECT_EQ(8, GetU(i210_buffer, col, row));
+ EXPECT_EQ(16, GetV(i210_buffer, col, row));
+ }
+ }
+
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer(i210_buffer->ToI420());
+ EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
+ EXPECT_EQ(height, i420_buffer->height());
+ EXPECT_EQ(width, i420_buffer->width());
+}
+
+} // namespace webrtc
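Why the ToI420 test above expects (1, 2, 4) from an I210 buffer filled with (4, 8, 16): I210 carries 10-bit samples while I420 carries 8-bit samples, so the conversion drops the two least significant bits (value >> 2). A quick compile-time check of that mapping (a sketch mirroring the test's constants):

static_assert((4 >> 2) == 1, "Y: 10-bit 4 maps to 8-bit 1");
static_assert((8 >> 2) == 2, "U: 10-bit 8 maps to 8-bit 2");
static_assert((16 >> 2) == 4, "V: 10-bit 16 maps to 8-bit 4");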
diff --git a/third_party/libwebrtc/api/video/test/i410_buffer_unittest.cc b/third_party/libwebrtc/api/video/test/i410_buffer_unittest.cc
new file mode 100644
index 0000000000..c5d2d5bf2d
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/i410_buffer_unittest.cc
@@ -0,0 +1,120 @@
+
+/*
+ * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i410_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "test/frame_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+constexpr uint16_t kYValue = 4;
+constexpr uint16_t kUValue = 8;
+constexpr uint16_t kVValue = 16;
+
+int GetY(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
+ return buf->DataY()[row * buf->StrideY() + col];
+}
+
+int GetU(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
+ return buf->DataU()[row * buf->StrideU() + col];
+}
+
+int GetV(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
+ return buf->DataV()[row * buf->StrideV() + col];
+}
+
+void FillI410Buffer(rtc::scoped_refptr<I410Buffer> buf) {
+ for (int row = 0; row < buf->height(); ++row) {
+ for (int col = 0; col < buf->width(); ++col) {
+ buf->MutableDataY()[row * buf->StrideY() + col] = kYValue;
+ buf->MutableDataU()[row * buf->StrideU() + col] = kUValue;
+ buf->MutableDataV()[row * buf->StrideV() + col] = kVValue;
+ }
+ }
+}
+
+} // namespace
+
+TEST(I410BufferTest, InitialData) {
+ constexpr int stride = 3;
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
+ EXPECT_EQ(width, i410_buffer->width());
+ EXPECT_EQ(height, i410_buffer->height());
+ EXPECT_EQ(stride, i410_buffer->StrideY());
+ EXPECT_EQ(stride, i410_buffer->StrideU());
+ EXPECT_EQ(stride, i410_buffer->StrideV());
+ EXPECT_EQ(3, i410_buffer->ChromaWidth());
+ EXPECT_EQ(3, i410_buffer->ChromaHeight());
+}
+
+TEST(I410BufferTest, ReadPixels) {
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
+ FillI410Buffer(i410_buffer);
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(kYValue, GetY(i410_buffer, col, row));
+ EXPECT_EQ(kUValue, GetU(i410_buffer, col, row));
+ EXPECT_EQ(kVValue, GetV(i410_buffer, col, row));
+ }
+ }
+}
+
+TEST(I410BufferTest, ToI420) {
+  // libyuv I410ToI420 only handles even sizes correctly and skips the last
+  // row/col if the size is odd.
+ constexpr int width = 4;
+ constexpr int height = 4;
+ constexpr int size_y = width * height;
+ constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
+ constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
+  // I410 is 10-bit while I420 is 8-bit, so the last 2 bits are discarded.
+ memset(reference->MutableDataY(), kYValue >> 2, size_y);
+ memset(reference->MutableDataU(), kUValue >> 2, size_u);
+ memset(reference->MutableDataV(), kVValue >> 2, size_v);
+
+ rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
+ FillI410Buffer(i410_buffer);
+
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(kYValue, GetY(i410_buffer, col, row));
+ EXPECT_EQ(kUValue, GetU(i410_buffer, col, row));
+ EXPECT_EQ(kVValue, GetV(i410_buffer, col, row));
+ }
+ }
+
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer(i410_buffer->ToI420());
+
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(1, i420_buffer->DataY()[row * i420_buffer->StrideY() + col]);
+ }
+ }
+
+ EXPECT_EQ(height, i420_buffer->height());
+ EXPECT_EQ(width, i420_buffer->width());
+ EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/i422_buffer_unittest.cc b/third_party/libwebrtc/api/video/test/i422_buffer_unittest.cc
new file mode 100644
index 0000000000..499b268546
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/i422_buffer_unittest.cc
@@ -0,0 +1,128 @@
+
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i422_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "test/frame_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+int GetY(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
+ return buf->DataY()[row * buf->StrideY() + col];
+}
+
+int GetU(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
+ return buf->DataU()[row * buf->StrideU() + col];
+}
+
+int GetV(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
+ return buf->DataV()[row * buf->StrideV() + col];
+}
+
+void FillI422Buffer(rtc::scoped_refptr<I422Buffer> buf) {
+ const uint8_t Y = 1;
+ const uint8_t U = 2;
+ const uint8_t V = 3;
+ for (int row = 0; row < buf->height(); ++row) {
+ for (int col = 0; col < buf->width(); ++col) {
+ buf->MutableDataY()[row * buf->StrideY() + col] = Y;
+ }
+ }
+ for (int row = 0; row < buf->ChromaHeight(); ++row) {
+ for (int col = 0; col < buf->ChromaWidth(); ++col) {
+ buf->MutableDataU()[row * buf->StrideU() + col] = U;
+ buf->MutableDataV()[row * buf->StrideV() + col] = V;
+ }
+ }
+}
+
+} // namespace
+
+TEST(I422BufferTest, InitialData) {
+ constexpr int stride = 3;
+ constexpr int halfstride = (stride + 1) >> 1;
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I422Buffer> i422_buffer(I422Buffer::Create(width, height));
+ EXPECT_EQ(width, i422_buffer->width());
+ EXPECT_EQ(height, i422_buffer->height());
+ EXPECT_EQ(stride, i422_buffer->StrideY());
+ EXPECT_EQ(halfstride, i422_buffer->StrideU());
+ EXPECT_EQ(halfstride, i422_buffer->StrideV());
+ EXPECT_EQ(halfwidth, i422_buffer->ChromaWidth());
+ EXPECT_EQ(height, i422_buffer->ChromaHeight());
+}
+
+TEST(I422BufferTest, ReadPixels) {
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I422Buffer> i422_buffer(I422Buffer::Create(width, height));
+ // Y = 1, U = 2, V = 3.
+ FillI422Buffer(i422_buffer);
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(1, GetY(i422_buffer, col, row));
+ }
+ }
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < halfwidth; col++) {
+ EXPECT_EQ(2, GetU(i422_buffer, col, row));
+ EXPECT_EQ(3, GetV(i422_buffer, col, row));
+ }
+ }
+}
+
+TEST(I422BufferTest, ToI420) {
+ constexpr int width = 3;
+ constexpr int halfwidth = (width + 1) >> 1;
+ constexpr int height = 3;
+ constexpr int size = width * height;
+ constexpr int halfsize = (width + 1) / 2 * height;
+ constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
+ memset(reference->MutableDataY(), 8, size);
+ memset(reference->MutableDataU(), 4, quartersize);
+ memset(reference->MutableDataV(), 2, quartersize);
+
+ rtc::scoped_refptr<I422Buffer> i422_buffer(I422Buffer::Create(width, height));
+  // Fill the I422 buffer with the same content as the reference.
+ memset(i422_buffer->MutableDataY(), 8, size);
+ memset(i422_buffer->MutableDataU(), 4, halfsize);
+ memset(i422_buffer->MutableDataV(), 2, halfsize);
+
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(8, GetY(i422_buffer, col, row));
+ }
+ }
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < halfwidth; col++) {
+ EXPECT_EQ(4, GetU(i422_buffer, col, row));
+ EXPECT_EQ(2, GetV(i422_buffer, col, row));
+ }
+ }
+
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer(i422_buffer->ToI420());
+ EXPECT_EQ(height, i420_buffer->height());
+ EXPECT_EQ(width, i420_buffer->width());
+ EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/i444_buffer_unittest.cc b/third_party/libwebrtc/api/video/test/i444_buffer_unittest.cc
new file mode 100644
index 0000000000..9a1a9315aa
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/i444_buffer_unittest.cc
@@ -0,0 +1,112 @@
+
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i444_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "test/frame_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+int GetY(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
+ return buf->DataY()[row * buf->StrideY() + col];
+}
+
+int GetU(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
+ return buf->DataU()[row * buf->StrideU() + col];
+}
+
+int GetV(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
+ return buf->DataV()[row * buf->StrideV() + col];
+}
+
+void FillI444Buffer(rtc::scoped_refptr<I444Buffer> buf) {
+ const uint8_t Y = 1;
+ const uint8_t U = 2;
+ const uint8_t V = 3;
+ for (int row = 0; row < buf->height(); ++row) {
+ for (int col = 0; col < buf->width(); ++col) {
+ buf->MutableDataY()[row * buf->StrideY() + col] = Y;
+ buf->MutableDataU()[row * buf->StrideU() + col] = U;
+ buf->MutableDataV()[row * buf->StrideV() + col] = V;
+ }
+ }
+}
+
+} // namespace
+
+TEST(I444BufferTest, InitialData) {
+ constexpr int stride = 3;
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I444Buffer> i444_buffer(I444Buffer::Create(width, height));
+ EXPECT_EQ(width, i444_buffer->width());
+ EXPECT_EQ(height, i444_buffer->height());
+ EXPECT_EQ(stride, i444_buffer->StrideY());
+ EXPECT_EQ(stride, i444_buffer->StrideU());
+ EXPECT_EQ(stride, i444_buffer->StrideV());
+ EXPECT_EQ(3, i444_buffer->ChromaWidth());
+ EXPECT_EQ(3, i444_buffer->ChromaHeight());
+}
+
+TEST(I444BufferTest, ReadPixels) {
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<I444Buffer> i444_buffer(I444Buffer::Create(width, height));
+ // Y = 1, U = 2, V = 3.
+ FillI444Buffer(i444_buffer);
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(1, GetY(i444_buffer, col, row));
+ EXPECT_EQ(2, GetU(i444_buffer, col, row));
+ EXPECT_EQ(3, GetV(i444_buffer, col, row));
+ }
+ }
+}
+
+TEST(I444BufferTest, ToI420) {
+ constexpr int width = 3;
+ constexpr int height = 3;
+ constexpr int size_y = width * height;
+ constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
+ constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
+ memset(reference->MutableDataY(), 8, size_y);
+ memset(reference->MutableDataU(), 4, size_u);
+ memset(reference->MutableDataV(), 2, size_v);
+
+ rtc::scoped_refptr<I444Buffer> i444_buffer(I444Buffer::Create(width, height));
+  // Fill the I444 buffer with the same content as the reference.
+ memset(i444_buffer->MutableDataY(), 8, size_y);
+ memset(i444_buffer->MutableDataU(), 4, size_y);
+ memset(i444_buffer->MutableDataV(), 2, size_y);
+
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(8, GetY(i444_buffer, col, row));
+ EXPECT_EQ(4, GetU(i444_buffer, col, row));
+ EXPECT_EQ(2, GetV(i444_buffer, col, row));
+ }
+ }
+
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer(i444_buffer->ToI420());
+ EXPECT_EQ(height, i420_buffer->height());
+ EXPECT_EQ(width, i420_buffer->width());
+ EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/mock_recordable_encoded_frame.h b/third_party/libwebrtc/api/video/test/mock_recordable_encoded_frame.h
new file mode 100644
index 0000000000..2178932d2a
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/mock_recordable_encoded_frame.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
+#define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
+
+#include "api/video/recordable_encoded_frame.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+class MockRecordableEncodedFrame : public RecordableEncodedFrame {
+ public:
+ MOCK_METHOD(rtc::scoped_refptr<const EncodedImageBufferInterface>,
+ encoded_buffer,
+ (),
+ (const, override));
+ MOCK_METHOD(absl::optional<webrtc::ColorSpace>,
+ color_space,
+ (),
+ (const, override));
+ MOCK_METHOD(VideoCodecType, codec, (), (const, override));
+ MOCK_METHOD(bool, is_key_frame, (), (const, override));
+ MOCK_METHOD(EncodedResolution, resolution, (), (const, override));
+ MOCK_METHOD(Timestamp, render_time, (), (const, override));
+};
+} // namespace webrtc
+#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
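A minimal sketch of how the mock above might be used in a gMock test body (the expectation values are arbitrary examples):

// Sketch: stub out the RecordableEncodedFrame interface for code under test.
webrtc::MockRecordableEncodedFrame frame;
EXPECT_CALL(frame, is_key_frame()).WillOnce(::testing::Return(true));
EXPECT_CALL(frame, codec()).WillOnce(::testing::Return(webrtc::kVideoCodecVP8));
// The code under test then consumes `frame` through the
// RecordableEncodedFrame interface.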
diff --git a/third_party/libwebrtc/api/video/test/nv12_buffer_unittest.cc b/third_party/libwebrtc/api/video/test/nv12_buffer_unittest.cc
new file mode 100644
index 0000000000..d84adb5bf5
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/nv12_buffer_unittest.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/nv12_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "test/frame_utils.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+namespace {
+int GetY(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
+ return buf->DataY()[row * buf->StrideY() + col];
+}
+
+int GetU(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
+ return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2];
+}
+
+int GetV(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
+ return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2 + 1];
+}
+
+void FillNV12Buffer(rtc::scoped_refptr<NV12Buffer> buf) {
+ const uint8_t Y = 1;
+ const uint8_t U = 2;
+ const uint8_t V = 3;
+ for (int row = 0; row < buf->height(); ++row) {
+ for (int col = 0; col < buf->width(); ++col) {
+ buf->MutableDataY()[row * buf->StrideY() + col] = Y;
+ }
+ }
+ // Fill interleaving UV values.
+ for (int row = 0; row < buf->ChromaHeight(); row++) {
+ for (int col = 0; col < buf->StrideUV(); col += 2) {
+ int uv_index = row * buf->StrideUV() + col;
+ buf->MutableDataUV()[uv_index] = U;
+ buf->MutableDataUV()[uv_index + 1] = V;
+ }
+ }
+}
+
+} // namespace
+
+TEST(NV12BufferTest, InitialData) {
+ constexpr int stride_y = 3;
+ constexpr int stride_uv = 4;
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<NV12Buffer> nv12_buffer(NV12Buffer::Create(width, height));
+ EXPECT_EQ(width, nv12_buffer->width());
+ EXPECT_EQ(height, nv12_buffer->height());
+ EXPECT_EQ(stride_y, nv12_buffer->StrideY());
+ EXPECT_EQ(stride_uv, nv12_buffer->StrideUV());
+ EXPECT_EQ(2, nv12_buffer->ChromaWidth());
+ EXPECT_EQ(2, nv12_buffer->ChromaHeight());
+}
+
+TEST(NV12BufferTest, ReadPixels) {
+ constexpr int width = 3;
+ constexpr int height = 3;
+
+ rtc::scoped_refptr<NV12Buffer> nv12_buffer(NV12Buffer::Create(width, height));
+ // Y = 1, U = 2, V = 3.
+ FillNV12Buffer(nv12_buffer);
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(1, GetY(nv12_buffer, col, row));
+ EXPECT_EQ(2, GetU(nv12_buffer, col, row));
+ EXPECT_EQ(3, GetV(nv12_buffer, col, row));
+ }
+ }
+}
+
+TEST(NV12BufferTest, ToI420) {
+ constexpr int width = 3;
+ constexpr int height = 3;
+ constexpr int size_y = width * height;
+ constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
+ constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
+ rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
+ memset(reference->MutableDataY(), 8, size_y);
+ memset(reference->MutableDataU(), 4, size_u);
+ memset(reference->MutableDataV(), 2, size_v);
+
+ rtc::scoped_refptr<NV12Buffer> nv12_buffer(NV12Buffer::Create(width, height));
+  // Fill the NV12 buffer with the same content as the reference.
+ memset(nv12_buffer->MutableDataY(), 8, size_y);
+ // Interleaving u/v values.
+ for (int i = 0; i < size_u + size_v; i += 2) {
+ nv12_buffer->MutableDataUV()[i] = 4;
+ nv12_buffer->MutableDataUV()[i + 1] = 2;
+ }
+ // Confirm YUV values are as expected.
+ for (int row = 0; row < height; row++) {
+ for (int col = 0; col < width; col++) {
+ EXPECT_EQ(8, GetY(nv12_buffer, col, row));
+ EXPECT_EQ(4, GetU(nv12_buffer, col, row));
+ EXPECT_EQ(2, GetV(nv12_buffer, col, row));
+ }
+ }
+
+ rtc::scoped_refptr<I420BufferInterface> i420_buffer(nv12_buffer->ToI420());
+ EXPECT_EQ(height, i420_buffer->height());
+ EXPECT_EQ(width, i420_buffer->width());
+ EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/video_adaptation_counters_unittest.cc b/third_party/libwebrtc/api/video/test/video_adaptation_counters_unittest.cc
new file mode 100644
index 0000000000..a7d0bda7d2
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/video_adaptation_counters_unittest.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_adaptation_counters.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(AdaptationCountersTest, Addition) {
+ VideoAdaptationCounters a{0, 0};
+ VideoAdaptationCounters b{1, 2};
+ VideoAdaptationCounters total = a + b;
+ EXPECT_EQ(1, total.resolution_adaptations);
+ EXPECT_EQ(2, total.fps_adaptations);
+}
+
+TEST(AdaptationCountersTest, Equality) {
+ VideoAdaptationCounters a{1, 2};
+ VideoAdaptationCounters b{2, 1};
+ EXPECT_EQ(a, a);
+ EXPECT_NE(a, b);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/video_bitrate_allocation_unittest.cc b/third_party/libwebrtc/api/video/test/video_bitrate_allocation_unittest.cc
new file mode 100644
index 0000000000..8e66d4b0a1
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/video_bitrate_allocation_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_bitrate_allocation.h"
+
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+TEST(VideoBitrateAllocation, SimulcastTargetBitrate) {
+ VideoBitrateAllocation bitrate;
+ bitrate.SetBitrate(0, 0, 10000);
+ bitrate.SetBitrate(0, 1, 20000);
+ bitrate.SetBitrate(1, 0, 40000);
+ bitrate.SetBitrate(1, 1, 80000);
+
+ VideoBitrateAllocation layer0_bitrate;
+ layer0_bitrate.SetBitrate(0, 0, 10000);
+ layer0_bitrate.SetBitrate(0, 1, 20000);
+
+ VideoBitrateAllocation layer1_bitrate;
+ layer1_bitrate.SetBitrate(0, 0, 40000);
+ layer1_bitrate.SetBitrate(0, 1, 80000);
+
+ std::vector<absl::optional<VideoBitrateAllocation>> layer_allocations =
+ bitrate.GetSimulcastAllocations();
+
+ EXPECT_EQ(layer0_bitrate, layer_allocations[0]);
+ EXPECT_EQ(layer1_bitrate, layer_allocations[1]);
+}
+
+TEST(VideoBitrateAllocation, SimulcastTargetBitrateWithInactiveStream) {
+ // Create bitrate allocation with bitrate only for the first and third stream.
+ VideoBitrateAllocation bitrate;
+ bitrate.SetBitrate(0, 0, 10000);
+ bitrate.SetBitrate(0, 1, 20000);
+ bitrate.SetBitrate(2, 0, 40000);
+ bitrate.SetBitrate(2, 1, 80000);
+
+ VideoBitrateAllocation layer0_bitrate;
+ layer0_bitrate.SetBitrate(0, 0, 10000);
+ layer0_bitrate.SetBitrate(0, 1, 20000);
+
+ VideoBitrateAllocation layer2_bitrate;
+ layer2_bitrate.SetBitrate(0, 0, 40000);
+ layer2_bitrate.SetBitrate(0, 1, 80000);
+
+ std::vector<absl::optional<VideoBitrateAllocation>> layer_allocations =
+ bitrate.GetSimulcastAllocations();
+
+ EXPECT_EQ(layer0_bitrate, layer_allocations[0]);
+ EXPECT_FALSE(layer_allocations[1]);
+ EXPECT_EQ(layer2_bitrate, layer_allocations[2]);
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/test/video_frame_matchers.h b/third_party/libwebrtc/api/video/test/video_frame_matchers.h
new file mode 100644
index 0000000000..250459377b
--- /dev/null
+++ b/third_party/libwebrtc/api/video/test/video_frame_matchers.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
+#define API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
+
+#include "api/rtp_packet_infos.h"
+#include "api/video/video_frame.h"
+#include "test/gmock.h"
+
+namespace webrtc::test::video_frame_matchers {
+
+MATCHER_P(Rotation, rotation, "") {
+ return ::testing::Matches(::testing::Eq(rotation))(arg.rotation());
+}
+
+MATCHER_P(NtpTimestamp, ntp_ts, "") {
+ return arg.ntp_time_ms() == ntp_ts.ms();
+}
+
+MATCHER_P(PacketInfos, m, "") {
+ return ::testing::Matches(m)(arg.packet_infos());
+}
+
+} // namespace webrtc::test::video_frame_matchers
+
+#endif // API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
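A minimal usage sketch for the matchers above (`frame` is a hypothetical webrtc::VideoFrame under test; the expected values are arbitrary):

// Sketch: combine the matchers in a single gMock expectation.
using ::webrtc::test::video_frame_matchers::NtpTimestamp;
using ::webrtc::test::video_frame_matchers::Rotation;
EXPECT_THAT(frame,
            ::testing::AllOf(Rotation(webrtc::kVideoRotation_90),
                             NtpTimestamp(webrtc::Timestamp::Millis(1234))));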
diff --git a/third_party/libwebrtc/api/video/video_adaptation_counters.cc b/third_party/libwebrtc/api/video/video_adaptation_counters.cc
new file mode 100644
index 0000000000..df1769d5d4
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_adaptation_counters.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_adaptation_counters.h"
+
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+bool VideoAdaptationCounters::operator==(
+ const VideoAdaptationCounters& rhs) const {
+ return fps_adaptations == rhs.fps_adaptations &&
+ resolution_adaptations == rhs.resolution_adaptations;
+}
+
+bool VideoAdaptationCounters::operator!=(
+ const VideoAdaptationCounters& rhs) const {
+ return !(rhs == *this);
+}
+
+VideoAdaptationCounters VideoAdaptationCounters::operator+(
+ const VideoAdaptationCounters& other) const {
+ return VideoAdaptationCounters(
+ resolution_adaptations + other.resolution_adaptations,
+ fps_adaptations + other.fps_adaptations);
+}
+
+std::string VideoAdaptationCounters::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations
+ << " }";
+ return ss.Release();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_adaptation_counters.h b/third_party/libwebrtc/api/video/video_adaptation_counters.h
new file mode 100644
index 0000000000..2dea902f2f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_adaptation_counters.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2020 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+
+#include <string>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Counts the number of adaptations that have resulted from resource overuse.
+// Today we can adapt resolution and fps.
+struct VideoAdaptationCounters {
+ VideoAdaptationCounters() : resolution_adaptations(0), fps_adaptations(0) {}
+ VideoAdaptationCounters(int resolution_adaptations, int fps_adaptations)
+ : resolution_adaptations(resolution_adaptations),
+ fps_adaptations(fps_adaptations) {
+ RTC_DCHECK_GE(resolution_adaptations, 0);
+ RTC_DCHECK_GE(fps_adaptations, 0);
+ }
+
+ int Total() const { return fps_adaptations + resolution_adaptations; }
+
+ bool operator==(const VideoAdaptationCounters& rhs) const;
+ bool operator!=(const VideoAdaptationCounters& rhs) const;
+
+ VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;
+
+ std::string ToString() const;
+
+ int resolution_adaptations;
+ int fps_adaptations;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
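A small sketch of the arithmetic the struct above supports (the counter values are arbitrary):

// Sketch: combine counters from two adaptation sources.
webrtc::VideoAdaptationCounters cpu(/*resolution_adaptations=*/1,
                                    /*fps_adaptations=*/0);
webrtc::VideoAdaptationCounters quality(/*resolution_adaptations=*/0,
                                        /*fps_adaptations=*/2);
webrtc::VideoAdaptationCounters total = cpu + quality;
// total.Total() == 3 and total.ToString() == "{ res=1 fps=2 }".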
diff --git a/third_party/libwebrtc/api/video/video_adaptation_gn/moz.build b/third_party/libwebrtc/api/video/video_adaptation_gn/moz.build
new file mode 100644
index 0000000000..3b4495f26c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_adaptation_gn/moz.build
@@ -0,0 +1,221 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/video_adaptation_counters.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_adaptation_gn")
diff --git a/third_party/libwebrtc/api/video/video_adaptation_reason.h b/third_party/libwebrtc/api/video/video_adaptation_reason.h
new file mode 100644
index 0000000000..3b7fc36eed
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_adaptation_reason.h
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_ADAPTATION_REASON_H_
+#define API_VIDEO_VIDEO_ADAPTATION_REASON_H_
+
+namespace webrtc {
+
+enum class VideoAdaptationReason { kQuality, kCpu };
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_ADAPTATION_REASON_H_
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocation.cc b/third_party/libwebrtc/api/video/video_bitrate_allocation.cc
new file mode 100644
index 0000000000..e189db1c19
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocation.cc
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_bitrate_allocation.h"
+
+#include <cstdint>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+VideoBitrateAllocation::VideoBitrateAllocation()
+ : sum_(0), is_bw_limited_(false) {}
+
+bool VideoBitrateAllocation::SetBitrate(size_t spatial_index,
+ size_t temporal_index,
+ uint32_t bitrate_bps) {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
+ int64_t new_bitrate_sum_bps = sum_;
+ absl::optional<uint32_t>& layer_bitrate =
+ bitrates_[spatial_index][temporal_index];
+ if (layer_bitrate) {
+ RTC_DCHECK_LE(*layer_bitrate, sum_);
+ new_bitrate_sum_bps -= *layer_bitrate;
+ }
+ new_bitrate_sum_bps += bitrate_bps;
+ if (new_bitrate_sum_bps > kMaxBitrateBps)
+ return false;
+
+ layer_bitrate = bitrate_bps;
+ sum_ = rtc::dchecked_cast<uint32_t>(new_bitrate_sum_bps);
+ return true;
+}
+
+bool VideoBitrateAllocation::HasBitrate(size_t spatial_index,
+ size_t temporal_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
+ return bitrates_[spatial_index][temporal_index].has_value();
+}
+
+uint32_t VideoBitrateAllocation::GetBitrate(size_t spatial_index,
+ size_t temporal_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
+ return bitrates_[spatial_index][temporal_index].value_or(0);
+}
+
+// Whether the specified spatial layer has a bitrate set in any of its
+// temporal layers.
+bool VideoBitrateAllocation::IsSpatialLayerUsed(size_t spatial_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ for (size_t i = 0; i < kMaxTemporalStreams; ++i) {
+ if (bitrates_[spatial_index][i].has_value())
+ return true;
+ }
+ return false;
+}
+
+// Get the sum of all the temporal layers for a specific spatial layer.
+uint32_t VideoBitrateAllocation::GetSpatialLayerSum(
+ size_t spatial_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ return GetTemporalLayerSum(spatial_index, kMaxTemporalStreams - 1);
+}
+
+uint32_t VideoBitrateAllocation::GetTemporalLayerSum(
+ size_t spatial_index,
+ size_t temporal_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
+ uint32_t sum = 0;
+ for (size_t i = 0; i <= temporal_index; ++i) {
+ sum += bitrates_[spatial_index][i].value_or(0);
+ }
+ return sum;
+}
+
+std::vector<uint32_t> VideoBitrateAllocation::GetTemporalLayerAllocation(
+ size_t spatial_index) const {
+ RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
+ std::vector<uint32_t> temporal_rates;
+
+ // Find the highest temporal layer with a defined bitrate in order to
+ // determine the size of the temporal layer allocation.
+ for (size_t i = kMaxTemporalStreams; i > 0; --i) {
+ if (bitrates_[spatial_index][i - 1].has_value()) {
+ temporal_rates.resize(i);
+ break;
+ }
+ }
+
+ for (size_t i = 0; i < temporal_rates.size(); ++i) {
+ temporal_rates[i] = bitrates_[spatial_index][i].value_or(0);
+ }
+
+ return temporal_rates;
+}
+
+std::vector<absl::optional<VideoBitrateAllocation>>
+VideoBitrateAllocation::GetSimulcastAllocations() const {
+ std::vector<absl::optional<VideoBitrateAllocation>> bitrates;
+ for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
+ absl::optional<VideoBitrateAllocation> layer_bitrate;
+ if (IsSpatialLayerUsed(si)) {
+ layer_bitrate = VideoBitrateAllocation();
+ for (int tl = 0; tl < kMaxTemporalStreams; ++tl) {
+ if (HasBitrate(si, tl))
+ layer_bitrate->SetBitrate(0, tl, GetBitrate(si, tl));
+ }
+ }
+ bitrates.push_back(layer_bitrate);
+ }
+ return bitrates;
+}
+
+bool VideoBitrateAllocation::operator==(
+ const VideoBitrateAllocation& other) const {
+ for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
+ for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
+ if (bitrates_[si][ti] != other.bitrates_[si][ti])
+ return false;
+ }
+ }
+ return true;
+}
+
+std::string VideoBitrateAllocation::ToString() const {
+ if (sum_ == 0)
+ return "VideoBitrateAllocation [ [] ]";
+
+ // Max string length in practice is 260, but let's have some overhead and
+ // round up to nearest power of two.
+ char string_buf[512];
+ rtc::SimpleStringBuilder ssb(string_buf);
+
+ ssb << "VideoBitrateAllocation [";
+ uint32_t spatial_cumulator = 0;
+ for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
+ RTC_DCHECK_LE(spatial_cumulator, sum_);
+ if (spatial_cumulator == sum_)
+ break;
+
+ const uint32_t layer_sum = GetSpatialLayerSum(si);
+ if (layer_sum == sum_ && si == 0) {
+ ssb << " [";
+ } else {
+ if (si > 0)
+ ssb << ",";
+ ssb << '\n' << " [";
+ }
+ spatial_cumulator += layer_sum;
+
+ uint32_t temporal_cumulator = 0;
+ for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
+ RTC_DCHECK_LE(temporal_cumulator, layer_sum);
+ if (temporal_cumulator == layer_sum)
+ break;
+
+ if (ti > 0)
+ ssb << ", ";
+
+ uint32_t bitrate = bitrates_[si][ti].value_or(0);
+ ssb << bitrate;
+ temporal_cumulator += bitrate;
+ }
+ ssb << "]";
+ }
+
+ RTC_DCHECK_EQ(spatial_cumulator, sum_);
+ ssb << " ]";
+ return ssb.str();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocation.h b/third_party/libwebrtc/api/video/video_bitrate_allocation.h
new file mode 100644
index 0000000000..4feffa2e66
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocation.h
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
+#define API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <limits>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video/video_codec_constants.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Class that describes how video bitrate, in bps, is allocated across temporal
+// and spatial layers. Note that bitrates are NOT cumulative; depending on
+// whether layers are dependent or not, it is up to the user to aggregate them.
+// For each index, the bitrate can also be either set or unset. Explicitly
+// setting a bitrate of 0 bps is used as a "turn off" signal for that layer.
+class RTC_EXPORT VideoBitrateAllocation {
+ public:
+ static constexpr uint32_t kMaxBitrateBps =
+ std::numeric_limits<uint32_t>::max();
+ VideoBitrateAllocation();
+
+ bool SetBitrate(size_t spatial_index,
+ size_t temporal_index,
+ uint32_t bitrate_bps);
+
+ bool HasBitrate(size_t spatial_index, size_t temporal_index) const;
+
+ uint32_t GetBitrate(size_t spatial_index, size_t temporal_index) const;
+
+  // Whether the specified spatial layer has a bitrate set in any of its
+  // temporal layers.
+ bool IsSpatialLayerUsed(size_t spatial_index) const;
+
+  // Get the sum of all the temporal layers for a specific spatial layer.
+ uint32_t GetSpatialLayerSum(size_t spatial_index) const;
+
+ // Sum of bitrates of temporal layers, from layer 0 to `temporal_index`
+ // inclusive, of specified spatial layer `spatial_index`. Bitrates of lower
+ // spatial layers are not included.
+ uint32_t GetTemporalLayerSum(size_t spatial_index,
+ size_t temporal_index) const;
+
+ // Returns a vector of the temporal layer bitrates for the specific spatial
+ // layer. Length of the returned vector is cropped to the highest temporal
+ // layer with a defined bitrate.
+ std::vector<uint32_t> GetTemporalLayerAllocation(size_t spatial_index) const;
+
+ // Returns one VideoBitrateAllocation for each spatial layer. This is used to
+ // configure simulcast streams. Note that the length of the returned vector is
+  // always kMaxSpatialLayers; the optional is unset for unused layers.
+ std::vector<absl::optional<VideoBitrateAllocation>> GetSimulcastAllocations()
+ const;
+
+ uint32_t get_sum_bps() const { return sum_; } // Sum of all bitrates.
+ uint32_t get_sum_kbps() const {
+ // Round down to not exceed the allocated bitrate.
+ return sum_ / 1000;
+ }
+
+ bool operator==(const VideoBitrateAllocation& other) const;
+ inline bool operator!=(const VideoBitrateAllocation& other) const {
+ return !(*this == other);
+ }
+
+ std::string ToString() const;
+
+ // Indicates if the allocation has some layers/streams disabled due to
+ // low available bandwidth.
+ void set_bw_limited(bool limited) { is_bw_limited_ = limited; }
+ bool is_bw_limited() const { return is_bw_limited_; }
+
+ private:
+ uint32_t sum_;
+ absl::optional<uint32_t> bitrates_[kMaxSpatialLayers][kMaxTemporalStreams];
+ bool is_bw_limited_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
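A minimal usage sketch of the allocation grid defined above (illustration only, not part of the patch; the main() wrapper and the example bitrates are assumptions):

// Sketch: exercises the public VideoBitrateAllocation API declared above.
#include <cstdio>

#include "api/video/video_bitrate_allocation.h"

int main() {
  webrtc::VideoBitrateAllocation allocation;
  // Two temporal layers on spatial layer 0, one on spatial layer 1.
  allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/0, 300000);
  allocation.SetBitrate(0, 1, 200000);
  allocation.SetBitrate(1, 0, 900000);

  std::printf("total: %u kbps\n", allocation.get_sum_kbps());         // 1400
  std::printf("S0 sum: %u bps\n", allocation.GetSpatialLayerSum(0));  // 500000
  std::printf("S1 used: %d\n", allocation.IsSpatialLayerUsed(1));     // 1
  // The vector is cropped to the highest temporal layer with a value set.
  std::printf("S0 temporal layers: %zu\n",
              allocation.GetTemporalLayerAllocation(0).size());       // 2
  return 0;
}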
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocation_gn/moz.build b/third_party/libwebrtc/api/video/video_bitrate_allocation_gn/moz.build
new file mode 100644
index 0000000000..2c7b1f39e7
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocation_gn/moz.build
@@ -0,0 +1,221 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/video_bitrate_allocation.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_bitrate_allocation_gn")
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator.cc b/third_party/libwebrtc/api/video/video_bitrate_allocator.cc
new file mode 100644
index 0000000000..f4e843b348
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocator.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_bitrate_allocator.h"
+
+namespace webrtc {
+
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+ uint32_t total_bitrate_bps,
+ uint32_t framerate)
+ : total_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
+ stable_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
+ framerate(static_cast<double>(framerate)) {}
+
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+ DataRate total_bitrate,
+ double framerate)
+ : total_bitrate(total_bitrate),
+ stable_bitrate(total_bitrate),
+ framerate(framerate) {}
+
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+ DataRate total_bitrate,
+ DataRate stable_bitrate,
+ double framerate)
+ : total_bitrate(total_bitrate),
+ stable_bitrate(stable_bitrate),
+ framerate(framerate) {}
+
+VideoBitrateAllocationParameters::~VideoBitrateAllocationParameters() = default;
+
+VideoBitrateAllocation VideoBitrateAllocator::GetAllocation(
+ uint32_t total_bitrate_bps,
+ uint32_t framerate) {
+ return Allocate({DataRate::BitsPerSec(total_bitrate_bps),
+ DataRate::BitsPerSec(total_bitrate_bps),
+ static_cast<double>(framerate)});
+}
+
+VideoBitrateAllocation VideoBitrateAllocator::Allocate(
+ VideoBitrateAllocationParameters parameters) {
+ return GetAllocation(parameters.total_bitrate.bps(), parameters.framerate);
+}
+
+void VideoBitrateAllocator::SetLegacyConferenceMode(bool enabled) {}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator.h b/third_party/libwebrtc/api/video/video_bitrate_allocator.h
new file mode 100644
index 0000000000..fdc86dbc57
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocator.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
+#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
+
+#include "api/units/data_rate.h"
+#include "api/video/video_bitrate_allocation.h"
+
+namespace webrtc {
+
+struct VideoBitrateAllocationParameters {
+ VideoBitrateAllocationParameters(uint32_t total_bitrate_bps,
+ uint32_t framerate);
+ VideoBitrateAllocationParameters(DataRate total_bitrate, double framerate);
+ VideoBitrateAllocationParameters(DataRate total_bitrate,
+ DataRate stable_bitrate,
+ double framerate);
+ ~VideoBitrateAllocationParameters();
+
+ DataRate total_bitrate;
+ DataRate stable_bitrate;
+ double framerate;
+};
+
+class VideoBitrateAllocator {
+ public:
+ VideoBitrateAllocator() {}
+ virtual ~VideoBitrateAllocator() {}
+
+ virtual VideoBitrateAllocation GetAllocation(uint32_t total_bitrate_bps,
+ uint32_t framerate);
+
+ virtual VideoBitrateAllocation Allocate(
+ VideoBitrateAllocationParameters parameters);
+
+ // Deprecated: Only used to work around issues with the legacy conference
+ // screenshare mode and shouldn't be needed by any subclasses.
+ virtual void SetLegacyConferenceMode(bool enabled);
+};
+
+class VideoBitrateAllocationObserver {
+ public:
+ VideoBitrateAllocationObserver() {}
+ virtual ~VideoBitrateAllocationObserver() {}
+
+ virtual void OnBitrateAllocationUpdated(
+ const VideoBitrateAllocation& allocation) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
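Because the legacy GetAllocation() default forwards to Allocate(), a custom allocator only needs to override Allocate(). The EvenSplitAllocator below is a hypothetical sketch, not part of this patch: it splits the target rate evenly across two simulcast streams and ignores the stable rate and framerate.

// Hypothetical allocator, for illustration only.
#include <cstdint>

#include "api/video/video_bitrate_allocator.h"

class EvenSplitAllocator : public webrtc::VideoBitrateAllocator {
 public:
  webrtc::VideoBitrateAllocation Allocate(
      webrtc::VideoBitrateAllocationParameters parameters) override {
    webrtc::VideoBitrateAllocation allocation;
    // Give each of the two simulcast streams half of the target bitrate.
    const uint32_t half =
        static_cast<uint32_t>(parameters.total_bitrate.bps() / 2);
    allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/0, half);
    allocation.SetBitrate(1, 0, half);
    return allocation;
  }
};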
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator_factory.h b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory.h
new file mode 100644
index 0000000000..cb34ebb5e1
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+
+#include <memory>
+
+#include "api/video/video_bitrate_allocator.h"
+#include "api/video_codecs/video_codec.h"
+
+namespace webrtc {
+
+// A factory that creates VideoBitrateAllocator.
+// NOTE: This class is still under development and may change without notice.
+class VideoBitrateAllocatorFactory {
+ public:
+ virtual ~VideoBitrateAllocatorFactory() = default;
+ // Creates a VideoBitrateAllocator for a specific video codec.
+ virtual std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
+ const VideoCodec& codec) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
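A call-site sketch, assuming the builtin factory declared in api/video/builtin_video_bitrate_allocator_factory.h is available and that the caller fills in `codec` elsewhere; the AllocateFor() helper and the 1.5 Mbps / 30 fps figures are illustrative assumptions.

// Sketch: obtain an allocator from a factory and compute an allocation.
#include <memory>

#include "api/units/data_rate.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocator_factory.h"
#include "api/video_codecs/video_codec.h"

webrtc::VideoBitrateAllocation AllocateFor(const webrtc::VideoCodec& codec) {
  std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> factory =
      webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
  std::unique_ptr<webrtc::VideoBitrateAllocator> allocator =
      factory->CreateVideoBitrateAllocator(codec);
  // 1.5 Mbps target at 30 fps; the stable bitrate defaults to the target.
  return allocator->Allocate(
      {webrtc::DataRate::KilobitsPerSec(1500), /*framerate=*/30.0});
}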
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build
new file mode 100644
index 0000000000..a6e2e2c6f5
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_bitrate_allocator_factory_gn")
diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator_gn/moz.build b/third_party/libwebrtc/api/video/video_bitrate_allocator_gn/moz.build
new file mode 100644
index 0000000000..f09476aacb
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_bitrate_allocator_gn/moz.build
@@ -0,0 +1,221 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/video_bitrate_allocator.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_bitrate_allocator_gn")
diff --git a/third_party/libwebrtc/api/video/video_codec_constants.h b/third_party/libwebrtc/api/video/video_codec_constants.h
new file mode 100644
index 0000000000..5859f9b4cf
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_codec_constants.h
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
+#define API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
+
+namespace webrtc {
+
+enum : int { kMaxEncoderBuffers = 8 };
+enum : int { kMaxSimulcastStreams = 3 };
+enum : int { kMaxSpatialLayers = 5 };
+enum : int { kMaxTemporalStreams = 4 };
+enum : int { kMaxPreferredPixelFormats = 5 };
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
diff --git a/third_party/libwebrtc/api/video/video_codec_constants_gn/moz.build b/third_party/libwebrtc/api/video/video_codec_constants_gn/moz.build
new file mode 100644
index 0000000000..2ac72c5b54
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_codec_constants_gn/moz.build
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_codec_constants_gn")
diff --git a/third_party/libwebrtc/api/video/video_codec_type.h b/third_party/libwebrtc/api/video/video_codec_type.h
new file mode 100644
index 0000000000..74a4bc4258
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_codec_type.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_CODEC_TYPE_H_
+#define API_VIDEO_VIDEO_CODEC_TYPE_H_
+
+namespace webrtc {
+
+enum VideoCodecType {
+ // There are various memset(..., 0, ...) calls in the code that rely on
+ // kVideoCodecGeneric being zero.
+ kVideoCodecGeneric = 0,
+ kVideoCodecVP8,
+ kVideoCodecVP9,
+ kVideoCodecAV1,
+ kVideoCodecH264,
+ kVideoCodecMultiplex,
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_CODEC_TYPE_H_
diff --git a/third_party/libwebrtc/api/video/video_content_type.cc b/third_party/libwebrtc/api/video/video_content_type.cc
new file mode 100644
index 0000000000..9ba3ece79b
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_content_type.cc
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_content_type.h"
+
+// VideoContentType is stored as a single byte that is sent over the network.
+// Structure:
+//
+// 0 1 2 3 4 5 6 7
+// +---------------+
+// |r r e e e s s c|
+//
+// where:
+// r - reserved bits.
+// e - 3-bit number of an experiment group counted from 1. 0 means there's no
+// experiment ongoing.
+// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
+// no simulcast information is set.
+// c - content type. 0 means real-time video, 1 means screenshare.
+//
+
+namespace webrtc {
+namespace videocontenttypehelpers {
+
+namespace {
+static constexpr uint8_t kScreenshareBitsSize = 1;
+static constexpr uint8_t kScreenshareBitsMask =
+ (1u << kScreenshareBitsSize) - 1;
+
+static constexpr uint8_t kSimulcastShift = 1;
+static constexpr uint8_t kSimulcastBitsSize = 2;
+static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1)
+ << kSimulcastShift; // 0b00000110
+
+static constexpr uint8_t kExperimentShift = 3;
+static constexpr uint8_t kExperimentBitsSize = 3;
+static constexpr uint8_t kExperimentBitsMask =
+ ((1u << kExperimentBitsSize) - 1) << kExperimentShift; // 0b00111000
+
+static constexpr uint8_t kTotalBitsSize =
+ kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize;
+} // namespace
+
+bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) {
+ // Store in bits 2-4.
+ if (experiment_id >= (1 << kExperimentBitsSize))
+ return false;
+ *content_type = static_cast<VideoContentType>(
+ (static_cast<uint8_t>(*content_type) & ~kExperimentBitsMask) |
+ ((experiment_id << kExperimentShift) & kExperimentBitsMask));
+ return true;
+}
+
+bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
+ // Store in bits 5-6.
+ if (simulcast_id >= (1 << kSimulcastBitsSize))
+ return false;
+ *content_type = static_cast<VideoContentType>(
+ (static_cast<uint8_t>(*content_type) & ~kSimulcastBitsMask) |
+ ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask));
+ return true;
+}
+
+uint8_t GetExperimentId(const VideoContentType& content_type) {
+ return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
+ kExperimentShift;
+}
+uint8_t GetSimulcastId(const VideoContentType& content_type) {
+ return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
+ kSimulcastShift;
+}
+
+bool IsScreenshare(const VideoContentType& content_type) {
+ return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
+}
+
+bool IsValidContentType(uint8_t value) {
+ // Any 6-bit value is allowed.
+ return value < (1 << kTotalBitsSize);
+}
+
+const char* ToString(const VideoContentType& content_type) {
+ return IsScreenshare(content_type) ? "screen" : "realtime";
+}
+} // namespace videocontenttypehelpers
+} // namespace webrtc
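A small round-trip sketch of the byte layout documented above (illustration only; the main() wrapper and the chosen ids are assumptions):

// Sketch: pack experiment/simulcast ids into a VideoContentType and read back.
#include <cstdint>
#include <cstdio>

#include "api/video/video_content_type.h"

int main() {
  using namespace webrtc;
  VideoContentType type = VideoContentType::SCREENSHARE;  // c bit set.
  videocontenttypehelpers::SetExperimentId(&type, 3);      // e bits.
  videocontenttypehelpers::SetSimulcastId(&type, 2);       // s bits.
  // Byte value is now 0b00011101: experiment 3, simulcast 2, screenshare.
  std::printf("%s exp=%d sim=%d valid=%d\n",
              videocontenttypehelpers::ToString(type),
              static_cast<int>(videocontenttypehelpers::GetExperimentId(type)),
              static_cast<int>(videocontenttypehelpers::GetSimulcastId(type)),
              videocontenttypehelpers::IsValidContentType(
                  static_cast<uint8_t>(type)));
  // Prints: screen exp=3 sim=2 valid=1
  return 0;
}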
diff --git a/third_party/libwebrtc/api/video/video_content_type.h b/third_party/libwebrtc/api/video/video_content_type.h
new file mode 100644
index 0000000000..2d38a62366
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_content_type.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_
+#define API_VIDEO_VIDEO_CONTENT_TYPE_H_
+
+#include <stdint.h>
+
+namespace webrtc {
+
+enum class VideoContentType : uint8_t {
+ UNSPECIFIED = 0,
+ SCREENSHARE = 1,
+};
+
+namespace videocontenttypehelpers {
+bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id);
+bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id);
+
+uint8_t GetExperimentId(const VideoContentType& content_type);
+uint8_t GetSimulcastId(const VideoContentType& content_type);
+
+bool IsScreenshare(const VideoContentType& content_type);
+
+bool IsValidContentType(uint8_t value);
+
+const char* ToString(const VideoContentType& content_type);
+} // namespace videocontenttypehelpers
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_CONTENT_TYPE_H_
diff --git a/third_party/libwebrtc/api/video/video_frame.cc b/third_party/libwebrtc/api/video/video_frame.cc
new file mode 100644
index 0000000000..130820a886
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame.cc
@@ -0,0 +1,317 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+void VideoFrame::UpdateRect::Union(const UpdateRect& other) {
+ if (other.IsEmpty())
+ return;
+ if (IsEmpty()) {
+ *this = other;
+ return;
+ }
+ int right = std::max(offset_x + width, other.offset_x + other.width);
+ int bottom = std::max(offset_y + height, other.offset_y + other.height);
+ offset_x = std::min(offset_x, other.offset_x);
+ offset_y = std::min(offset_y, other.offset_y);
+ width = right - offset_x;
+ height = bottom - offset_y;
+ RTC_DCHECK_GT(width, 0);
+ RTC_DCHECK_GT(height, 0);
+}
+
+void VideoFrame::UpdateRect::Intersect(const UpdateRect& other) {
+ if (other.IsEmpty() || IsEmpty()) {
+ MakeEmptyUpdate();
+ return;
+ }
+
+ int right = std::min(offset_x + width, other.offset_x + other.width);
+ int bottom = std::min(offset_y + height, other.offset_y + other.height);
+ offset_x = std::max(offset_x, other.offset_x);
+ offset_y = std::max(offset_y, other.offset_y);
+ width = right - offset_x;
+ height = bottom - offset_y;
+ if (width <= 0 || height <= 0) {
+ MakeEmptyUpdate();
+ }
+}
+
+void VideoFrame::UpdateRect::MakeEmptyUpdate() {
+ width = height = offset_x = offset_y = 0;
+}
+
+bool VideoFrame::UpdateRect::IsEmpty() const {
+ return width == 0 && height == 0;
+}
+
+VideoFrame::UpdateRect VideoFrame::UpdateRect::ScaleWithFrame(
+ int frame_width,
+ int frame_height,
+ int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) const {
+ RTC_DCHECK_GT(frame_width, 0);
+ RTC_DCHECK_GT(frame_height, 0);
+
+ RTC_DCHECK_GT(crop_width, 0);
+ RTC_DCHECK_GT(crop_height, 0);
+
+ RTC_DCHECK_LE(crop_width + crop_x, frame_width);
+ RTC_DCHECK_LE(crop_height + crop_y, frame_height);
+
+ RTC_DCHECK_GT(scaled_width, 0);
+ RTC_DCHECK_GT(scaled_height, 0);
+
+ // Check if update rect is out of the cropped area.
+ if (offset_x + width < crop_x || offset_x > crop_x + crop_width ||
+      offset_y + height < crop_y || offset_y > crop_y + crop_height) {
+ return {0, 0, 0, 0};
+ }
+
+ int x = offset_x - crop_x;
+ int w = width;
+ if (x < 0) {
+ w += x;
+ x = 0;
+ }
+ int y = offset_y - crop_y;
+ int h = height;
+ if (y < 0) {
+ h += y;
+ y = 0;
+ }
+
+ // Lower corner is rounded down.
+ x = x * scaled_width / crop_width;
+ y = y * scaled_height / crop_height;
+ // Upper corner is rounded up.
+ w = (w * scaled_width + crop_width - 1) / crop_width;
+ h = (h * scaled_height + crop_height - 1) / crop_height;
+
+ // Round to full 2x2 blocks due to possible subsampling in the pixel data.
+ if (x % 2) {
+ --x;
+ ++w;
+ }
+ if (y % 2) {
+ --y;
+ ++h;
+ }
+ if (w % 2) {
+ ++w;
+ }
+ if (h % 2) {
+ ++h;
+ }
+
+ // Expand the update rect by 2 pixels in each direction to include any
+ // possible scaling artifacts.
+ if (scaled_width != crop_width || scaled_height != crop_height) {
+ if (x > 0) {
+ x -= 2;
+ w += 2;
+ }
+ if (y > 0) {
+ y -= 2;
+ h += 2;
+ }
+ w += 2;
+ h += 2;
+ }
+
+ // Ensure update rect is inside frame dimensions.
+ if (x + w > scaled_width) {
+ w = scaled_width - x;
+ }
+ if (y + h > scaled_height) {
+ h = scaled_height - y;
+ }
+ RTC_DCHECK_GE(w, 0);
+ RTC_DCHECK_GE(h, 0);
+ if (w == 0 || h == 0) {
+ w = 0;
+ h = 0;
+ x = 0;
+ y = 0;
+ }
+
+ return {x, y, w, h};
+}
+
+VideoFrame::Builder::Builder() = default;
+
+VideoFrame::Builder::~Builder() = default;
+
+VideoFrame VideoFrame::Builder::build() {
+ RTC_CHECK(video_frame_buffer_ != nullptr);
+ return VideoFrame(id_, video_frame_buffer_, timestamp_us_, timestamp_rtp_,
+ ntp_time_ms_, rotation_, color_space_, render_parameters_,
+ update_rect_, packet_infos_);
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_video_frame_buffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+ video_frame_buffer_ = buffer;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_timestamp_ms(
+ int64_t timestamp_ms) {
+ timestamp_us_ = timestamp_ms * rtc::kNumMicrosecsPerMillisec;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_timestamp_us(
+ int64_t timestamp_us) {
+ timestamp_us_ = timestamp_us;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_timestamp_rtp(
+ uint32_t timestamp_rtp) {
+ timestamp_rtp_ = timestamp_rtp;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_ntp_time_ms(int64_t ntp_time_ms) {
+ ntp_time_ms_ = ntp_time_ms;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_rotation(VideoRotation rotation) {
+ rotation_ = rotation;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_color_space(
+ const absl::optional<ColorSpace>& color_space) {
+ color_space_ = color_space;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_color_space(
+ const ColorSpace* color_space) {
+ color_space_ =
+ color_space ? absl::make_optional(*color_space) : absl::nullopt;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_id(uint16_t id) {
+ id_ = id;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_update_rect(
+ const absl::optional<VideoFrame::UpdateRect>& update_rect) {
+ update_rect_ = update_rect;
+ return *this;
+}
+
+VideoFrame::Builder& VideoFrame::Builder::set_packet_infos(
+ RtpPacketInfos packet_infos) {
+ packet_infos_ = std::move(packet_infos);
+ return *this;
+}
+
+VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ webrtc::VideoRotation rotation,
+ int64_t timestamp_us)
+ : video_frame_buffer_(buffer),
+ timestamp_rtp_(0),
+ ntp_time_ms_(0),
+ timestamp_us_(timestamp_us),
+ rotation_(rotation) {}
+
+VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ uint32_t timestamp_rtp,
+ int64_t render_time_ms,
+ VideoRotation rotation)
+ : video_frame_buffer_(buffer),
+ timestamp_rtp_(timestamp_rtp),
+ ntp_time_ms_(0),
+ timestamp_us_(render_time_ms * rtc::kNumMicrosecsPerMillisec),
+ rotation_(rotation) {
+ RTC_DCHECK(buffer);
+}
+
+VideoFrame::VideoFrame(uint16_t id,
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ int64_t timestamp_us,
+ uint32_t timestamp_rtp,
+ int64_t ntp_time_ms,
+ VideoRotation rotation,
+ const absl::optional<ColorSpace>& color_space,
+ const RenderParameters& render_parameters,
+ const absl::optional<UpdateRect>& update_rect,
+ RtpPacketInfos packet_infos)
+ : id_(id),
+ video_frame_buffer_(buffer),
+ timestamp_rtp_(timestamp_rtp),
+ ntp_time_ms_(ntp_time_ms),
+ timestamp_us_(timestamp_us),
+ rotation_(rotation),
+ color_space_(color_space),
+ render_parameters_(render_parameters),
+ update_rect_(update_rect),
+ packet_infos_(std::move(packet_infos)) {
+ if (update_rect_) {
+ RTC_DCHECK_GE(update_rect_->offset_x, 0);
+ RTC_DCHECK_GE(update_rect_->offset_y, 0);
+ RTC_DCHECK_LE(update_rect_->offset_x + update_rect_->width, width());
+ RTC_DCHECK_LE(update_rect_->offset_y + update_rect_->height, height());
+ }
+}
+
+VideoFrame::~VideoFrame() = default;
+
+VideoFrame::VideoFrame(const VideoFrame&) = default;
+VideoFrame::VideoFrame(VideoFrame&&) = default;
+VideoFrame& VideoFrame::operator=(const VideoFrame&) = default;
+VideoFrame& VideoFrame::operator=(VideoFrame&&) = default;
+
+int VideoFrame::width() const {
+ return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
+}
+
+int VideoFrame::height() const {
+ return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
+}
+
+uint32_t VideoFrame::size() const {
+ return width() * height();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
+ return video_frame_buffer_;
+}
+
+void VideoFrame::set_video_frame_buffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
+ RTC_CHECK(buffer);
+ video_frame_buffer_ = buffer;
+}
+
+int64_t VideoFrame::render_time_ms() const {
+ return timestamp_us() / rtc::kNumMicrosecsPerMillisec;
+}
+
+} // namespace webrtc
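The Builder implementation above simply chains setters and hands every field to the private VideoFrame constructor in build(). As a minimal usage sketch (hypothetical caller code, not part of the patch; it assumes api/video/i420_buffer.h and rtc_base/time_utils.h are available and picks an arbitrary 640x480 buffer):

    // Hypothetical caller code, not part of the vendored sources.
    rtc::scoped_refptr<webrtc::I420Buffer> buffer =
        webrtc::I420Buffer::Create(/*width=*/640, /*height=*/480);
    webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                   .set_video_frame_buffer(buffer)
                                   .set_timestamp_us(rtc::TimeMicros())
                                   .set_rotation(webrtc::kVideoRotation_0)
                                   .build();

Fields that are not set keep the Builder defaults declared in the header below: no update rect, an empty RtpPacketInfos, and id equal to kNotSetId.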
diff --git a/third_party/libwebrtc/api/video/video_frame.h b/third_party/libwebrtc/api/video/video_frame.h
new file mode 100644
index 0000000000..086aad820f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame.h
@@ -0,0 +1,302 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_H_
+#define API_VIDEO_VIDEO_FRAME_H_
+
+#include <stdint.h>
+
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/rtp_packet_infos.h"
+#include "api/scoped_refptr.h"
+#include "api/video/color_space.h"
+#include "api/video/hdr_metadata.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class RTC_EXPORT VideoFrame {
+ public:
+ // Value used to signal that `VideoFrame::id()` is not set.
+ static constexpr uint16_t kNotSetId = 0;
+
+ struct RTC_EXPORT UpdateRect {
+ int offset_x;
+ int offset_y;
+ int width;
+ int height;
+
+ // Makes this UpdateRect a bounding box of this and other rect.
+ void Union(const UpdateRect& other);
+
+ // Makes this UpdateRect an intersection of this and other rect.
+ void Intersect(const UpdateRect& other);
+
+ // Sets everything to 0, making this UpdateRect a zero-size (empty) update.
+ void MakeEmptyUpdate();
+
+ bool IsEmpty() const;
+
+ // Per-member equality check. Empty rectangles with different offsets would
+ // be considered different.
+ bool operator==(const UpdateRect& other) const {
+ return other.offset_x == offset_x && other.offset_y == offset_y &&
+ other.width == width && other.height == height;
+ }
+
+ bool operator!=(const UpdateRect& other) const { return !(*this == other); }
+
+ // Scales update_rect given original frame dimensions.
+ // Cropping is applied first, then rect is scaled down.
+ // Update rect is snapped to 2x2 grid due to possible UV subsampling and
+ // then expanded by additional 2 pixels in each direction to accommodate any
+ // possible scaling artifacts.
+ // Note that close but not equal update_rects on the original frame may
+ // result in the same scaled update rects.
+ UpdateRect ScaleWithFrame(int frame_width,
+ int frame_height,
+ int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) const;
+ };
+
+ struct RTC_EXPORT ProcessingTime {
+ TimeDelta Elapsed() const { return finish - start; }
+ Timestamp start;
+ Timestamp finish;
+ };
+
+ struct RTC_EXPORT RenderParameters {
+ bool use_low_latency_rendering = false;
+ absl::optional<int32_t> max_composition_delay_in_frames;
+
+ bool operator==(const RenderParameters& other) const {
+ return other.use_low_latency_rendering == use_low_latency_rendering &&
+ other.max_composition_delay_in_frames ==
+ max_composition_delay_in_frames;
+ }
+
+ bool operator!=(const RenderParameters& other) const {
+ return !(*this == other);
+ }
+ };
+
+ // Preferred way of building VideoFrame objects.
+ class RTC_EXPORT Builder {
+ public:
+ Builder();
+ ~Builder();
+
+ VideoFrame build();
+ Builder& set_video_frame_buffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+ Builder& set_timestamp_ms(int64_t timestamp_ms);
+ Builder& set_timestamp_us(int64_t timestamp_us);
+ Builder& set_timestamp_rtp(uint32_t timestamp_rtp);
+ Builder& set_ntp_time_ms(int64_t ntp_time_ms);
+ Builder& set_rotation(VideoRotation rotation);
+ Builder& set_color_space(const absl::optional<ColorSpace>& color_space);
+ Builder& set_color_space(const ColorSpace* color_space);
+ Builder& set_id(uint16_t id);
+ Builder& set_update_rect(const absl::optional<UpdateRect>& update_rect);
+ Builder& set_packet_infos(RtpPacketInfos packet_infos);
+
+ private:
+ uint16_t id_ = kNotSetId;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
+ int64_t timestamp_us_ = 0;
+ uint32_t timestamp_rtp_ = 0;
+ int64_t ntp_time_ms_ = 0;
+ VideoRotation rotation_ = kVideoRotation_0;
+ absl::optional<ColorSpace> color_space_;
+ RenderParameters render_parameters_;
+ absl::optional<UpdateRect> update_rect_;
+ RtpPacketInfos packet_infos_;
+ };
+
+ // To be deprecated. Migrate all use to Builder.
+ VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ webrtc::VideoRotation rotation,
+ int64_t timestamp_us);
+ VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ uint32_t timestamp_rtp,
+ int64_t render_time_ms,
+ VideoRotation rotation);
+
+ ~VideoFrame();
+
+ // Support move and copy.
+ VideoFrame(const VideoFrame&);
+ VideoFrame(VideoFrame&&);
+ VideoFrame& operator=(const VideoFrame&);
+ VideoFrame& operator=(VideoFrame&&);
+
+ // Get frame width.
+ int width() const;
+ // Get frame height.
+ int height() const;
+ // Get frame size in pixels.
+ uint32_t size() const;
+
+ // Get frame ID. Returns `kNotSetId` if ID is not set. Not guaranteed to be
+ // transferred from the sender to the receiver, but preserved on the sender
+ // side. The id should be propagated through all frame modifications during
+ // its lifetime, from capture to sending as an encoded image. It is intended
+ // to be unique over a time window of a few minutes for the peer connection
+ // to which the corresponding video stream belongs.
+ uint16_t id() const { return id_; }
+ void set_id(uint16_t id) { id_ = id; }
+
+ // System monotonic clock, same timebase as rtc::TimeMicros().
+ int64_t timestamp_us() const { return timestamp_us_; }
+ void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; }
+
+ // Set frame timestamp (90kHz).
+ void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
+
+ // Get frame timestamp (90kHz).
+ uint32_t timestamp() const { return timestamp_rtp_; }
+
+ // Set capture ntp time in milliseconds.
+ void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }
+
+ // Get capture ntp time in milliseconds.
+ int64_t ntp_time_ms() const { return ntp_time_ms_; }
+
+ // Naming convention for Coordination of Video Orientation. Please see
+ // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
+ //
+ // "pending rotation" or "pending" = a frame that has a VideoRotation > 0.
+ //
+ // "not pending" = a frame that has a VideoRotation == 0.
+ //
+ // "apply rotation" = modify a frame from being "pending" to being "not
+ // pending" rotation (a no-op for "unrotated").
+ //
+ VideoRotation rotation() const { return rotation_; }
+ void set_rotation(VideoRotation rotation) { rotation_ = rotation; }
+
+ // Get color space when available.
+ const absl::optional<ColorSpace>& color_space() const { return color_space_; }
+ void set_color_space(const absl::optional<ColorSpace>& color_space) {
+ color_space_ = color_space;
+ }
+
+ RenderParameters render_parameters() const { return render_parameters_; }
+ void set_render_parameters(const RenderParameters& render_parameters) {
+ render_parameters_ = render_parameters;
+ }
+
+ // Deprecated in favor of render_parameters(); will be removed once Chromium
+ // is updated. max_composition_delay_in_frames() is used in an experiment
+ // with a low-latency renderer algorithm; see crbug.com/1138888.
+ [[deprecated("Use render_parameters() instead.")]] absl::optional<int32_t>
+ max_composition_delay_in_frames() const {
+ return render_parameters_.max_composition_delay_in_frames;
+ }
+
+ // Get render time in milliseconds.
+ int64_t render_time_ms() const;
+
+ // Return the underlying buffer. Never nullptr for a properly
+ // initialized VideoFrame.
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;
+
+ void set_video_frame_buffer(
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
+
+ // Return true if the frame is stored in a texture.
+ bool is_texture() const {
+ return video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative;
+ }
+
+ bool has_update_rect() const { return update_rect_.has_value(); }
+
+ // Returns the update_rect set by the builder or set_update_rect(), or the
+ // whole-frame rect if no update rect is available.
+ UpdateRect update_rect() const {
+ return update_rect_.value_or(UpdateRect{0, 0, width(), height()});
+ }
+
+ // Rectangle must be within the frame dimensions.
+ void set_update_rect(const VideoFrame::UpdateRect& update_rect) {
+ RTC_DCHECK_GE(update_rect.offset_x, 0);
+ RTC_DCHECK_GE(update_rect.offset_y, 0);
+ RTC_DCHECK_LE(update_rect.offset_x + update_rect.width, width());
+ RTC_DCHECK_LE(update_rect.offset_y + update_rect.height, height());
+ update_rect_ = update_rect;
+ }
+
+ void clear_update_rect() { update_rect_ = absl::nullopt; }
+
+ // Get information about packets used to assemble this video frame. Might be
+ // empty if the information isn't available.
+ const RtpPacketInfos& packet_infos() const { return packet_infos_; }
+ void set_packet_infos(RtpPacketInfos value) {
+ packet_infos_ = std::move(value);
+ }
+
+ const absl::optional<ProcessingTime> processing_time() const {
+ return processing_time_;
+ }
+ void set_processing_time(const ProcessingTime& processing_time) {
+ processing_time_ = processing_time;
+ }
+
+ private:
+ VideoFrame(uint16_t id,
+ const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ int64_t timestamp_us,
+ uint32_t timestamp_rtp,
+ int64_t ntp_time_ms,
+ VideoRotation rotation,
+ const absl::optional<ColorSpace>& color_space,
+ const RenderParameters& render_parameters,
+ const absl::optional<UpdateRect>& update_rect,
+ RtpPacketInfos packet_infos);
+
+ uint16_t id_;
+ // An opaque reference counted handle that stores the pixel data.
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
+ uint32_t timestamp_rtp_;
+ int64_t ntp_time_ms_;
+ int64_t timestamp_us_;
+ VideoRotation rotation_;
+ absl::optional<ColorSpace> color_space_;
+ // Contains parameters that affect how the frame should be rendered.
+ RenderParameters render_parameters_;
+ // Area updated since the last frame. If present, it means that the bounding
+ // box of all the changes is within the rectangular area and is close to it.
+ // If absent, it means that there is no information about the change at all
+ // and update_rect() will return a rectangle corresponding to the entire
+ // frame.
+ absl::optional<UpdateRect> update_rect_;
+ // Information about packets used to assemble this video frame. This is needed
+ // by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's
+ // MediaStreamTrack, in order to implement getContributingSources(). See:
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
+ RtpPacketInfos packet_infos_;
+ // Processing timestamps of the frame. For received video frames these are the
+ // timestamps when the frame is sent to the decoder and the decoded image
+ // returned from the decoder.
+ // Currently, not set for locally captured video frames.
+ absl::optional<ProcessingTime> processing_time_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_H_
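As declared above, update_rect() falls back to a rectangle covering the whole frame when no partial-update information is available, while set_update_rect() DCHECKs that the rectangle stays inside the frame. A short sketch of that contract (hypothetical caller code, continuing the Builder example earlier):

    // With no update rect set, the whole frame is reported as changed.
    webrtc::VideoFrame::UpdateRect full = frame.update_rect();
    // full == {0, 0, frame.width(), frame.height()}

    // A partial update must lie within the frame dimensions.
    frame.set_update_rect(webrtc::VideoFrame::UpdateRect{0, 0, 16, 16});
    // has_update_rect() is now true; clear_update_rect() restores the fallback.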
diff --git a/third_party/libwebrtc/api/video/video_frame_buffer.cc b/third_party/libwebrtc/api/video/video_frame_buffer.cc
new file mode 100644
index 0000000000..374b438adc
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_buffer.cc
@@ -0,0 +1,242 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_buffer.h"
+
+#include "api/video/i420_buffer.h"
+#include "api/video/i422_buffer.h"
+#include "api/video/i444_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBuffer::CropAndScale(
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ rtc::scoped_refptr<I420Buffer> result =
+ I420Buffer::Create(scaled_width, scaled_height);
+ result->CropAndScaleFrom(*this->ToI420(), offset_x, offset_y, crop_width,
+ crop_height);
+ return result;
+}
+
+const I420BufferInterface* VideoFrameBuffer::GetI420() const {
+ // Overridden by subclasses that can return an I420 buffer without any
+ // conversion, in particular, I420BufferInterface.
+ return nullptr;
+}
+
+const I420ABufferInterface* VideoFrameBuffer::GetI420A() const {
+ RTC_CHECK(type() == Type::kI420A);
+ return static_cast<const I420ABufferInterface*>(this);
+}
+
+const I444BufferInterface* VideoFrameBuffer::GetI444() const {
+ RTC_CHECK(type() == Type::kI444);
+ return static_cast<const I444BufferInterface*>(this);
+}
+
+const I422BufferInterface* VideoFrameBuffer::GetI422() const {
+ RTC_CHECK(type() == Type::kI422);
+ return static_cast<const I422BufferInterface*>(this);
+}
+
+const I010BufferInterface* VideoFrameBuffer::GetI010() const {
+ RTC_CHECK(type() == Type::kI010);
+ return static_cast<const I010BufferInterface*>(this);
+}
+
+const I210BufferInterface* VideoFrameBuffer::GetI210() const {
+ RTC_CHECK(type() == Type::kI210);
+ return static_cast<const I210BufferInterface*>(this);
+}
+
+const I410BufferInterface* VideoFrameBuffer::GetI410() const {
+ RTC_CHECK(type() == Type::kI410);
+ return static_cast<const I410BufferInterface*>(this);
+}
+
+const NV12BufferInterface* VideoFrameBuffer::GetNV12() const {
+ RTC_CHECK(type() == Type::kNV12);
+ return static_cast<const NV12BufferInterface*>(this);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBuffer::GetMappedFrameBuffer(
+ rtc::ArrayView<Type> types) {
+ RTC_CHECK(type() == Type::kNative);
+ return nullptr;
+}
+
+VideoFrameBuffer::Type I420BufferInterface::type() const {
+ return Type::kI420;
+}
+
+const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) {
+ switch (type) {
+ case VideoFrameBuffer::Type::kNative:
+ return "kNative";
+ case VideoFrameBuffer::Type::kI420:
+ return "kI420";
+ case VideoFrameBuffer::Type::kI420A:
+ return "kI420A";
+ case VideoFrameBuffer::Type::kI444:
+ return "kI444";
+ case VideoFrameBuffer::Type::kI422:
+ return "kI422";
+ case VideoFrameBuffer::Type::kI010:
+ return "kI010";
+ case VideoFrameBuffer::Type::kI210:
+ return "kI210";
+ case VideoFrameBuffer::Type::kI410:
+ return "kI410";
+ case VideoFrameBuffer::Type::kNV12:
+ return "kNV12";
+ default:
+ RTC_DCHECK_NOTREACHED();
+ }
+}
+
+int I420BufferInterface::ChromaWidth() const {
+ return (width() + 1) / 2;
+}
+
+int I420BufferInterface::ChromaHeight() const {
+ return (height() + 1) / 2;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I420BufferInterface::ToI420() {
+ return rtc::scoped_refptr<I420BufferInterface>(this);
+}
+
+const I420BufferInterface* I420BufferInterface::GetI420() const {
+ return this;
+}
+
+VideoFrameBuffer::Type I420ABufferInterface::type() const {
+ return Type::kI420A;
+}
+
+VideoFrameBuffer::Type I444BufferInterface::type() const {
+ return Type::kI444;
+}
+
+int I444BufferInterface::ChromaWidth() const {
+ return width();
+}
+
+int I444BufferInterface::ChromaHeight() const {
+ return height();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> I444BufferInterface::CropAndScale(
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ rtc::scoped_refptr<I444Buffer> result =
+ I444Buffer::Create(scaled_width, scaled_height);
+ result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
+ return result;
+}
+
+VideoFrameBuffer::Type I422BufferInterface::type() const {
+ return Type::kI422;
+}
+
+int I422BufferInterface::ChromaWidth() const {
+ return (width() + 1) / 2;
+}
+
+int I422BufferInterface::ChromaHeight() const {
+ return height();
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> I422BufferInterface::CropAndScale(
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ rtc::scoped_refptr<I422Buffer> result =
+ I422Buffer::Create(scaled_width, scaled_height);
+ result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
+ return result;
+}
+
+VideoFrameBuffer::Type I010BufferInterface::type() const {
+ return Type::kI010;
+}
+
+int I010BufferInterface::ChromaWidth() const {
+ return (width() + 1) / 2;
+}
+
+int I010BufferInterface::ChromaHeight() const {
+ return (height() + 1) / 2;
+}
+
+VideoFrameBuffer::Type I210BufferInterface::type() const {
+ return Type::kI210;
+}
+
+int I210BufferInterface::ChromaWidth() const {
+ return (width() + 1) / 2;
+}
+
+int I210BufferInterface::ChromaHeight() const {
+ return height();
+}
+
+VideoFrameBuffer::Type I410BufferInterface::type() const {
+ return Type::kI410;
+}
+
+int I410BufferInterface::ChromaWidth() const {
+ return width();
+}
+
+int I410BufferInterface::ChromaHeight() const {
+ return height();
+}
+
+VideoFrameBuffer::Type NV12BufferInterface::type() const {
+ return Type::kNV12;
+}
+
+int NV12BufferInterface::ChromaWidth() const {
+ return (width() + 1) / 2;
+}
+
+int NV12BufferInterface::ChromaHeight() const {
+ return (height() + 1) / 2;
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> NV12BufferInterface::CropAndScale(
+ int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) {
+ rtc::scoped_refptr<NV12Buffer> result =
+ NV12Buffer::Create(scaled_width, scaled_height);
+ result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
+ return result;
+}
+
+} // namespace webrtc
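The ChromaWidth()/ChromaHeight() overrides above encode each format's subsampling. A worked example for a 7x5 luma plane, following the arithmetic in this file:

    // 4:2:0 (I420, I010, NV12): chroma = ((7 + 1) / 2) x ((5 + 1) / 2) = 4 x 3
    // 4:2:2 (I422, I210):       chroma = ((7 + 1) / 2) x 5             = 4 x 5
    // 4:4:4 (I444, I410):       chroma = 7 x 5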
diff --git a/third_party/libwebrtc/api/video/video_frame_buffer.h b/third_party/libwebrtc/api/video/video_frame_buffer.h
new file mode 100644
index 0000000000..aaf786699f
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_buffer.h
@@ -0,0 +1,325 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_BUFFER_H_
+#define API_VIDEO_VIDEO_FRAME_BUFFER_H_
+
+#include <stdint.h>
+
+#include "api/array_view.h"
+#include "api/scoped_refptr.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class I420BufferInterface;
+class I420ABufferInterface;
+class I422BufferInterface;
+class I444BufferInterface;
+class I010BufferInterface;
+class I210BufferInterface;
+class I410BufferInterface;
+class NV12BufferInterface;
+
+// Base class for frame buffers of different types of pixel format and storage.
+// The tag in type() indicates how the data is represented, and each type is
+// implemented as a subclass. To access the pixel data, call the appropriate
+// GetXXX() function, where XXX represents the type. There is also a function
+// ToI420() that returns a frame buffer in I420 format, converting from the
+// underlying representation if necessary. I420 is the most widely accepted
+// format and serves as a fallback for video sinks that can only handle I420,
+// e.g. the internal WebRTC software encoders. A special enum value 'kNative' is
+// provided for external clients to implement their own frame buffer
+// representations, e.g. as textures. The external client can produce such
+// native frame buffers from custom video sources, and then cast it back to the
+// correct subclass in custom video sinks. The purpose of this is to improve
+// performance by providing an optimized path without intermediate conversions.
+// Frame metadata such as rotation and timestamp are stored in
+// webrtc::VideoFrame, and not here.
+class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface {
+ public:
+ // New frame buffer types will be added conservatively when there is an
+ // opportunity to optimize the path between some pair of video source and
+ // video sink.
+ // GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
+ // GENERATED_JAVA_CLASS_NAME_OVERRIDE: VideoFrameBufferType
+ enum class Type {
+ kNative,
+ kI420,
+ kI420A,
+ kI422,
+ kI444,
+ kI010,
+ kI210,
+ kI410,
+ kNV12,
+ };
+
+ // This function specifies in what pixel format the data is stored.
+ virtual Type type() const = 0;
+
+ // The resolution of the frame in pixels. For formats where some planes are
+ // subsampled, this is the highest-resolution plane.
+ virtual int width() const = 0;
+ virtual int height() const = 0;
+
+ // Returns a memory-backed frame buffer in I420 format. If the pixel data is
+ // in another format, a conversion will take place. All implementations must
+ // provide a fallback to I420 for compatibility with e.g. the internal WebRTC
+ // software encoders.
+ // Conversion may fail, for example if reading the pixel data from a texture
+ // fails. If the conversion fails, nullptr is returned.
+ virtual rtc::scoped_refptr<I420BufferInterface> ToI420() = 0;
+
+ // GetI420() should return an I420 buffer if the conversion is trivial, i.e.
+ // no change to the binary data is needed. Otherwise it should return
+ // nullptr. One example of a buffer with that property is
+ // WebrtcVideoFrameAdapter in Chrome - an I420 buffer backed by a shared
+ // memory buffer. It must therefore have type kNative, yet ToI420()
+ // doesn't affect the binary data at all. Another example is any I420A buffer.
+ // TODO(https://crbug.com/webrtc/12021): Make this method non-virtual and
+ // behave as the other GetXXX methods below.
+ virtual const I420BufferInterface* GetI420() const;
+
+ // A format specific scale function. Default implementation works by
+ // converting to I420. But more efficient implementations may override it,
+ // especially for kNative.
+ // First, the image is cropped to `crop_width` and `crop_height` and then
+ // scaled to `scaled_width` and `scaled_height`.
+ virtual rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height);
+
+ // Alias for common use case.
+ rtc::scoped_refptr<VideoFrameBuffer> Scale(int scaled_width,
+ int scaled_height) {
+ return CropAndScale(0, 0, width(), height(), scaled_width, scaled_height);
+ }
+
+ // These functions should only be called if type() is of the correct type.
+ // Calling with a different type will result in a crash.
+ const I420ABufferInterface* GetI420A() const;
+ const I422BufferInterface* GetI422() const;
+ const I444BufferInterface* GetI444() const;
+ const I010BufferInterface* GetI010() const;
+ const I210BufferInterface* GetI210() const;
+ const I410BufferInterface* GetI410() const;
+ const NV12BufferInterface* GetNV12() const;
+
+ // For a kNative frame, returns a VideoFrameBuffer in main memory with a
+ // pixel format from the given list of types, converted pixel-perfectly for
+ // encoding with a software encoder. Returns nullptr if the frame type is not
+ // supported, mapping is not possible, or the kNative frame has not
+ // implemented this method. Only callable if type() is kNative.
+ virtual rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
+ rtc::ArrayView<Type> types);
+
+ protected:
+ ~VideoFrameBuffer() override {}
+};
+
+// Update when VideoFrameBuffer::Type is updated.
+const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type);
+
+// This interface represents planar formats.
+class PlanarYuvBuffer : public VideoFrameBuffer {
+ public:
+ virtual int ChromaWidth() const = 0;
+ virtual int ChromaHeight() const = 0;
+
+ // Returns the number of steps (in terms of the Data*() return type) between
+ // successive rows for a given plane.
+ virtual int StrideY() const = 0;
+ virtual int StrideU() const = 0;
+ virtual int StrideV() const = 0;
+
+ protected:
+ ~PlanarYuvBuffer() override {}
+};
+
+// This interface represents 8-bit color depth formats: Type::kI420,
+// Type::kI420A, Type::kI422 and Type::kI444.
+class PlanarYuv8Buffer : public PlanarYuvBuffer {
+ public:
+ // Returns a pointer to the pixel data for a given plane. The memory is owned
+ // by the VideoFrameBuffer object and must not be freed by the caller.
+ virtual const uint8_t* DataY() const = 0;
+ virtual const uint8_t* DataU() const = 0;
+ virtual const uint8_t* DataV() const = 0;
+
+ protected:
+ ~PlanarYuv8Buffer() override {}
+};
+
+class RTC_EXPORT I420BufferInterface : public PlanarYuv8Buffer {
+ public:
+ Type type() const override;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+ const I420BufferInterface* GetI420() const final;
+
+ protected:
+ ~I420BufferInterface() override {}
+};
+
+class RTC_EXPORT I420ABufferInterface : public I420BufferInterface {
+ public:
+ Type type() const final;
+ virtual const uint8_t* DataA() const = 0;
+ virtual int StrideA() const = 0;
+
+ protected:
+ ~I420ABufferInterface() override {}
+};
+
+// Represents Type::kI422, 4:2:2 planar with 8 bits per pixel.
+class I422BufferInterface : public PlanarYuv8Buffer {
+ public:
+ Type type() const final;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) override;
+
+ protected:
+ ~I422BufferInterface() override {}
+};
+
+// Represents Type::kI444, 4:4:4 planar with 8 bits per pixel.
+class I444BufferInterface : public PlanarYuv8Buffer {
+ public:
+ Type type() const final;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) override;
+
+ protected:
+ ~I444BufferInterface() override {}
+};
+
+// This interface represents 8-bit to 16-bit color depth formats: Type::kI010,
+// Type::kI210 and Type::kI410.
+class PlanarYuv16BBuffer : public PlanarYuvBuffer {
+ public:
+ // Returns a pointer to the pixel data for a given plane. The memory is owned
+ // by the VideoFrameBuffer object and must not be freed by the caller.
+ virtual const uint16_t* DataY() const = 0;
+ virtual const uint16_t* DataU() const = 0;
+ virtual const uint16_t* DataV() const = 0;
+
+ protected:
+ ~PlanarYuv16BBuffer() override {}
+};
+
+// Represents Type::kI010, allocates 16 bits per pixel and fills 10 least
+// significant bits with color information.
+class I010BufferInterface : public PlanarYuv16BBuffer {
+ public:
+ Type type() const override;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ protected:
+ ~I010BufferInterface() override {}
+};
+
+// Represents Type::kI210, allocates 16 bits per pixel and fills 10 least
+// significant bits with color information.
+class I210BufferInterface : public PlanarYuv16BBuffer {
+ public:
+ Type type() const override;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ protected:
+ ~I210BufferInterface() override {}
+};
+
+// Represents Type::kI410, allocates 16 bits per pixel and fills 10 least
+// significant bits with color information.
+class I410BufferInterface : public PlanarYuv16BBuffer {
+ public:
+ Type type() const override;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ protected:
+ ~I410BufferInterface() override {}
+};
+
+class BiplanarYuvBuffer : public VideoFrameBuffer {
+ public:
+ virtual int ChromaWidth() const = 0;
+ virtual int ChromaHeight() const = 0;
+
+ // Returns the number of steps (in terms of the Data*() return type) between
+ // successive rows for a given plane.
+ virtual int StrideY() const = 0;
+ virtual int StrideUV() const = 0;
+
+ protected:
+ ~BiplanarYuvBuffer() override {}
+};
+
+class BiplanarYuv8Buffer : public BiplanarYuvBuffer {
+ public:
+ virtual const uint8_t* DataY() const = 0;
+ virtual const uint8_t* DataUV() const = 0;
+
+ protected:
+ ~BiplanarYuv8Buffer() override {}
+};
+
+// Represents Type::kNV12. NV12 is full-resolution Y and half-resolution
+// interleaved UV.
+class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer {
+ public:
+ Type type() const override;
+
+ int ChromaWidth() const final;
+ int ChromaHeight() const final;
+
+ rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
+ int offset_y,
+ int crop_width,
+ int crop_height,
+ int scaled_width,
+ int scaled_height) override;
+
+ protected:
+ ~NV12BufferInterface() override {}
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_
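The header above is built around a type() tag plus per-format accessors, with ToI420() as the universal fallback. A minimal consumer sketch (hypothetical code, not part of the patch) might look like:

    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buf = frame.video_frame_buffer();
    if (buf->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
      const webrtc::NV12BufferInterface* nv12 = buf->GetNV12();
      // Biplanar access: nv12->DataY(), nv12->DataUV(), nv12->StrideUV().
    } else {
      // Conversion path; may return nullptr, e.g. if texture readback fails.
      rtc::scoped_refptr<webrtc::I420BufferInterface> i420 = buf->ToI420();
    }
    // Scale() is a convenience alias for cropping to the full frame and scaling.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> small = buf->Scale(320, 180);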
diff --git a/third_party/libwebrtc/api/video/video_frame_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_gn/moz.build
new file mode 100644
index 0000000000..7a4e9fdc2c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_gn/moz.build
@@ -0,0 +1,236 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/media/libyuv/",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+SOURCES += [
+ "/third_party/libwebrtc/api/video/i422_buffer.cc",
+ "/third_party/libwebrtc/api/video/i444_buffer.cc"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/i420_buffer.cc",
+ "/third_party/libwebrtc/api/video/nv12_buffer.cc",
+ "/third_party/libwebrtc/api/video/video_frame.cc",
+ "/third_party/libwebrtc/api/video/video_frame_buffer.cc",
+ "/third_party/libwebrtc/api/video/video_source_interface.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_frame_gn")
diff --git a/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build
new file mode 100644
index 0000000000..e56d43a299
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/media/libyuv/",
+ "/media/libyuv/libyuv/include/",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+SOURCES += [
+ "/third_party/libwebrtc/api/video/i210_buffer.cc",
+ "/third_party/libwebrtc/api/video/i410_buffer.cc"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/i010_buffer.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_frame_i010_gn")
diff --git a/third_party/libwebrtc/api/video/video_frame_metadata.cc b/third_party/libwebrtc/api/video/video_frame_metadata.cc
new file mode 100644
index 0000000000..e1863e9c13
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_metadata.cc
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_frame_metadata.h"
+
+#include <utility>
+
+namespace webrtc {
+
+VideoFrameMetadata::VideoFrameMetadata() = default;
+
+VideoFrameType VideoFrameMetadata::GetFrameType() const {
+ return frame_type_;
+}
+
+void VideoFrameMetadata::SetFrameType(VideoFrameType frame_type) {
+ frame_type_ = frame_type;
+}
+
+uint16_t VideoFrameMetadata::GetWidth() const {
+ return width_;
+}
+
+void VideoFrameMetadata::SetWidth(uint16_t width) {
+ width_ = width;
+}
+
+uint16_t VideoFrameMetadata::GetHeight() const {
+ return height_;
+}
+
+void VideoFrameMetadata::SetHeight(uint16_t height) {
+ height_ = height;
+}
+
+VideoRotation VideoFrameMetadata::GetRotation() const {
+ return rotation_;
+}
+
+void VideoFrameMetadata::SetRotation(VideoRotation rotation) {
+ rotation_ = rotation;
+}
+
+VideoContentType VideoFrameMetadata::GetContentType() const {
+ return content_type_;
+}
+
+void VideoFrameMetadata::SetContentType(VideoContentType content_type) {
+ content_type_ = content_type;
+}
+
+absl::optional<int64_t> VideoFrameMetadata::GetFrameId() const {
+ return frame_id_;
+}
+
+void VideoFrameMetadata::SetFrameId(absl::optional<int64_t> frame_id) {
+ frame_id_ = frame_id;
+}
+
+int VideoFrameMetadata::GetSpatialIndex() const {
+ return spatial_index_;
+}
+
+void VideoFrameMetadata::SetSpatialIndex(int spatial_index) {
+ spatial_index_ = spatial_index;
+}
+
+int VideoFrameMetadata::GetTemporalIndex() const {
+ return temporal_index_;
+}
+
+void VideoFrameMetadata::SetTemporalIndex(int temporal_index) {
+ temporal_index_ = temporal_index;
+}
+
+rtc::ArrayView<const int64_t> VideoFrameMetadata::GetFrameDependencies() const {
+ return frame_dependencies_;
+}
+
+void VideoFrameMetadata::SetFrameDependencies(
+ rtc::ArrayView<const int64_t> frame_dependencies) {
+ frame_dependencies_.assign(frame_dependencies.begin(),
+ frame_dependencies.end());
+}
+
+rtc::ArrayView<const DecodeTargetIndication>
+VideoFrameMetadata::GetDecodeTargetIndications() const {
+ return decode_target_indications_;
+}
+
+void VideoFrameMetadata::SetDecodeTargetIndications(
+ rtc::ArrayView<const DecodeTargetIndication> decode_target_indications) {
+ decode_target_indications_.assign(decode_target_indications.begin(),
+ decode_target_indications.end());
+}
+
+bool VideoFrameMetadata::GetIsLastFrameInPicture() const {
+ return is_last_frame_in_picture_;
+}
+
+void VideoFrameMetadata::SetIsLastFrameInPicture(
+ bool is_last_frame_in_picture) {
+ is_last_frame_in_picture_ = is_last_frame_in_picture;
+}
+
+uint8_t VideoFrameMetadata::GetSimulcastIdx() const {
+ return simulcast_idx_;
+}
+
+void VideoFrameMetadata::SetSimulcastIdx(uint8_t simulcast_idx) {
+ simulcast_idx_ = simulcast_idx;
+}
+
+VideoCodecType VideoFrameMetadata::GetCodec() const {
+ return codec_;
+}
+
+void VideoFrameMetadata::SetCodec(VideoCodecType codec) {
+ codec_ = codec;
+}
+
+const RTPVideoHeaderCodecSpecifics&
+VideoFrameMetadata::GetRTPVideoHeaderCodecSpecifics() const {
+ return codec_specifics_;
+}
+
+void VideoFrameMetadata::SetRTPVideoHeaderCodecSpecifics(
+ RTPVideoHeaderCodecSpecifics codec_specifics) {
+ codec_specifics_ = std::move(codec_specifics);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_frame_metadata.h b/third_party/libwebrtc/api/video/video_frame_metadata.h
new file mode 100644
index 0000000000..2703f11324
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_metadata.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_
+#define API_VIDEO_VIDEO_FRAME_METADATA_H_
+
+#include <cstdint>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "absl/types/variant.h"
+#include "api/array_view.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_frame_type.h"
+#include "api/video/video_rotation.h"
+#include "modules/video_coding/codecs/h264/include/h264_globals.h"
+#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
+#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+using RTPVideoHeaderCodecSpecifics = absl::variant<absl::monostate,
+ RTPVideoHeaderVP8,
+ RTPVideoHeaderVP9,
+ RTPVideoHeaderH264>;
+
+// A subset of metadata from the RTP video header, exposed in the Insertable
+// Streams API.
+class RTC_EXPORT VideoFrameMetadata {
+ public:
+ VideoFrameMetadata();
+ VideoFrameMetadata(const VideoFrameMetadata&) = default;
+ VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default;
+
+ VideoFrameType GetFrameType() const;
+ void SetFrameType(VideoFrameType frame_type);
+
+ uint16_t GetWidth() const;
+ void SetWidth(uint16_t width);
+
+ uint16_t GetHeight() const;
+ void SetHeight(uint16_t height);
+
+ VideoRotation GetRotation() const;
+ void SetRotation(VideoRotation rotation);
+
+ VideoContentType GetContentType() const;
+ void SetContentType(VideoContentType content_type);
+
+ absl::optional<int64_t> GetFrameId() const;
+ void SetFrameId(absl::optional<int64_t> frame_id);
+
+ int GetSpatialIndex() const;
+ void SetSpatialIndex(int spatial_index);
+
+ int GetTemporalIndex() const;
+ void SetTemporalIndex(int temporal_index);
+
+ rtc::ArrayView<const int64_t> GetFrameDependencies() const;
+ void SetFrameDependencies(rtc::ArrayView<const int64_t> frame_dependencies);
+
+ rtc::ArrayView<const DecodeTargetIndication> GetDecodeTargetIndications()
+ const;
+ void SetDecodeTargetIndications(
+ rtc::ArrayView<const DecodeTargetIndication> decode_target_indications);
+
+ bool GetIsLastFrameInPicture() const;
+ void SetIsLastFrameInPicture(bool is_last_frame_in_picture);
+
+ uint8_t GetSimulcastIdx() const;
+ void SetSimulcastIdx(uint8_t simulcast_idx);
+
+ VideoCodecType GetCodec() const;
+ void SetCodec(VideoCodecType codec);
+
+ // Which variant is used depends on the VideoCodecType from GetCodec().
+ const RTPVideoHeaderCodecSpecifics& GetRTPVideoHeaderCodecSpecifics() const;
+ void SetRTPVideoHeaderCodecSpecifics(
+ RTPVideoHeaderCodecSpecifics codec_specifics);
+
+ private:
+ VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame;
+ uint16_t width_ = 0;
+ uint16_t height_ = 0;
+ VideoRotation rotation_ = VideoRotation::kVideoRotation_0;
+ VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
+
+ // Corresponding to GenericDescriptorInfo.
+ absl::optional<int64_t> frame_id_;
+ int spatial_index_ = 0;
+ int temporal_index_ = 0;
+ absl::InlinedVector<int64_t, 5> frame_dependencies_;
+ absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications_;
+
+ bool is_last_frame_in_picture_ = true;
+ uint8_t simulcast_idx_ = 0;
+ VideoCodecType codec_ = VideoCodecType::kVideoCodecGeneric;
+ RTPVideoHeaderCodecSpecifics codec_specifics_;
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_
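The codec-specific details ride in an absl::variant that is keyed, by convention, on GetCodec(). A hedged sketch of writing and reading it back (hypothetical caller code; RTPVideoHeaderVP8 comes from the vp8_globals.h include above):

    webrtc::VideoFrameMetadata metadata;
    metadata.SetCodec(webrtc::VideoCodecType::kVideoCodecVP8);
    metadata.SetRTPVideoHeaderCodecSpecifics(webrtc::RTPVideoHeaderVP8());
    if (metadata.GetCodec() == webrtc::VideoCodecType::kVideoCodecVP8) {
      const auto& vp8 = absl::get<webrtc::RTPVideoHeaderVP8>(
          metadata.GetRTPVideoHeaderCodecSpecifics());
      // vp8 refers to the stored VP8 header; for other codecs the variant
      // holds absl::monostate or the corresponding codec struct.
    }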
diff --git a/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build
new file mode 100644
index 0000000000..2d2221dbb5
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build
@@ -0,0 +1,225 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/video_frame_metadata.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_frame_metadata_gn")
diff --git a/third_party/libwebrtc/api/video/video_frame_type.h b/third_party/libwebrtc/api/video/video_frame_type.h
new file mode 100644
index 0000000000..4a96f1fe9e
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_type.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_FRAME_TYPE_H_
+#define API_VIDEO_VIDEO_FRAME_TYPE_H_
+
+namespace webrtc {
+
+enum class VideoFrameType {
+ kEmptyFrame = 0,
+ // Wire format for MultiplexEncodedImagePacker seems to depend on numerical
+ // values of these constants.
+ kVideoFrameKey = 3,
+ kVideoFrameDelta = 4,
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_FRAME_TYPE_H_
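
Since the comment above notes that MultiplexEncodedImagePacker appears to rely on the numeric values of these constants, here is a minimal sketch of the kind of wire round trip that depends on those values staying fixed. The helper names are hypothetical and not part of the header above.

    #include <cstdint>

    #include "api/video/video_frame_type.h"

    // Sketch only: hypothetical serialization helpers that rely on the enum's
    // fixed numeric values (0, 3, 4) being stable on the wire.
    inline uint8_t SerializeFrameType(webrtc::VideoFrameType type) {
      return static_cast<uint8_t>(type);
    }

    inline webrtc::VideoFrameType DeserializeFrameType(uint8_t value) {
      return static_cast<webrtc::VideoFrameType>(value);
    }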
diff --git a/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build
new file mode 100644
index 0000000000..bc4c46bfbd
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_frame_type_gn")
diff --git a/third_party/libwebrtc/api/video/video_layers_allocation.h b/third_party/libwebrtc/api/video/video_layers_allocation.h
new file mode 100644
index 0000000000..39734151ae
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_layers_allocation.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
+#define API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
+
+#include <cstdint>
+
+#include "absl/container/inlined_vector.h"
+#include "api/units/data_rate.h"
+
+namespace webrtc {
+
+// This struct contains additional stream-level information needed by a
+// Selective Forwarding Middlebox to make relay decisions for RTP streams.
+struct VideoLayersAllocation {
+ static constexpr int kMaxSpatialIds = 4;
+ static constexpr int kMaxTemporalIds = 4;
+
+ friend bool operator==(const VideoLayersAllocation& lhs,
+ const VideoLayersAllocation& rhs) {
+ return lhs.rtp_stream_index == rhs.rtp_stream_index &&
+ lhs.resolution_and_frame_rate_is_valid ==
+ rhs.resolution_and_frame_rate_is_valid &&
+ lhs.active_spatial_layers == rhs.active_spatial_layers;
+ }
+
+ friend bool operator!=(const VideoLayersAllocation& lhs,
+ const VideoLayersAllocation& rhs) {
+ return !(lhs == rhs);
+ }
+
+ struct SpatialLayer {
+ friend bool operator==(const SpatialLayer& lhs, const SpatialLayer& rhs) {
+ return lhs.rtp_stream_index == rhs.rtp_stream_index &&
+ lhs.spatial_id == rhs.spatial_id &&
+ lhs.target_bitrate_per_temporal_layer ==
+ rhs.target_bitrate_per_temporal_layer &&
+ lhs.width == rhs.width && lhs.height == rhs.height &&
+ lhs.frame_rate_fps == rhs.frame_rate_fps;
+ }
+
+ friend bool operator!=(const SpatialLayer& lhs, const SpatialLayer& rhs) {
+ return !(lhs == rhs);
+ }
+ int rtp_stream_index = 0;
+ // Index of the spatial layer per `rtp_stream_index`.
+ int spatial_id = 0;
+ // Target bitrate per decode target.
+ absl::InlinedVector<DataRate, kMaxTemporalIds>
+ target_bitrate_per_temporal_layer;
+
+ // These fields are only valid if `resolution_and_frame_rate_is_valid` is
+ // true
+ uint16_t width = 0;
+ uint16_t height = 0;
+ // Max frame rate used in any temporal layer of this spatial layer.
+ uint8_t frame_rate_fps = 0;
+ };
+
+  // Index of the RTP stream this allocation is sent on. Used for mapping
+  // a SpatialLayer to an RTP stream.
+ int rtp_stream_index = 0;
+ bool resolution_and_frame_rate_is_valid = false;
+ absl::InlinedVector<SpatialLayer, kMaxSpatialIds> active_spatial_layers;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
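
A minimal sketch of how a sender-side component might populate this struct for two spatial layers with two temporal layers each; the bitrate, resolution, and frame-rate numbers are purely illustrative.

    #include "api/units/data_rate.h"
    #include "api/video/video_layers_allocation.h"

    // Sketch only: build an allocation describing two active spatial layers.
    webrtc::VideoLayersAllocation MakeExampleAllocation() {
      webrtc::VideoLayersAllocation allocation;
      allocation.rtp_stream_index = 0;
      allocation.resolution_and_frame_rate_is_valid = true;

      webrtc::VideoLayersAllocation::SpatialLayer low;
      low.rtp_stream_index = 0;
      low.spatial_id = 0;
      low.target_bitrate_per_temporal_layer = {
          webrtc::DataRate::KilobitsPerSec(150),
          webrtc::DataRate::KilobitsPerSec(200)};
      low.width = 320;
      low.height = 180;
      low.frame_rate_fps = 30;

      webrtc::VideoLayersAllocation::SpatialLayer high;
      high.rtp_stream_index = 0;
      high.spatial_id = 1;
      high.target_bitrate_per_temporal_layer = {
          webrtc::DataRate::KilobitsPerSec(600),
          webrtc::DataRate::KilobitsPerSec(800)};
      high.width = 640;
      high.height = 360;
      high.frame_rate_fps = 30;

      allocation.active_spatial_layers = {low, high};
      return allocation;
    }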
diff --git a/third_party/libwebrtc/api/video/video_layers_allocation_gn/moz.build b/third_party/libwebrtc/api/video/video_layers_allocation_gn/moz.build
new file mode 100644
index 0000000000..ded4d37ab6
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_layers_allocation_gn/moz.build
@@ -0,0 +1,205 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_layers_allocation_gn")
diff --git a/third_party/libwebrtc/api/video/video_rotation.h b/third_party/libwebrtc/api/video/video_rotation.h
new file mode 100644
index 0000000000..6a29588ee5
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_rotation.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_ROTATION_H_
+#define API_VIDEO_VIDEO_ROTATION_H_
+
+namespace webrtc {
+
+// Enum for clockwise rotation, in degrees.
+enum VideoRotation {
+ kVideoRotation_0 = 0,
+ kVideoRotation_90 = 90,
+ kVideoRotation_180 = 180,
+ kVideoRotation_270 = 270
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_ROTATION_H_
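
A minimal sketch of how consumers of this enum typically account for rotation when computing a rendered size: 90- and 270-degree rotations swap width and height. The helper below is hypothetical, not part of the WebRTC API.

    #include "api/video/video_rotation.h"

    // Sketch only: the displayed width/height swap for 90- and 270-degree
    // rotations; 0 and 180 degrees leave the dimensions unchanged.
    void RotatedRenderSize(int width, int height, webrtc::VideoRotation rotation,
                           int* out_width, int* out_height) {
      if (rotation == webrtc::kVideoRotation_90 ||
          rotation == webrtc::kVideoRotation_270) {
        *out_width = height;
        *out_height = width;
      } else {
        *out_width = width;
        *out_height = height;
      }
    }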
diff --git a/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build b/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build
new file mode 100644
index 0000000000..fffe252ebd
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build
@@ -0,0 +1,228 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video/color_space.cc",
+ "/third_party/libwebrtc/api/video/hdr_metadata.cc",
+ "/third_party/libwebrtc/api/video/video_content_type.cc",
+ "/third_party/libwebrtc/api/video/video_timing.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_rtp_headers_gn")
diff --git a/third_party/libwebrtc/api/video/video_sink_interface.h b/third_party/libwebrtc/api/video/video_sink_interface.h
new file mode 100644
index 0000000000..9c1f5f3214
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_sink_interface.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_SINK_INTERFACE_H_
+#define API_VIDEO_VIDEO_SINK_INTERFACE_H_
+
+#include "absl/types/optional.h"
+#include "api/video_track_source_constraints.h"
+#include "rtc_base/checks.h"
+
+namespace rtc {
+
+template <typename VideoFrameT>
+class VideoSinkInterface {
+ public:
+ virtual ~VideoSinkInterface() = default;
+
+ virtual void OnFrame(const VideoFrameT& frame) = 0;
+
+ // Should be called by the source when it discards the frame due to rate
+ // limiting.
+ virtual void OnDiscardedFrame() {}
+
+ // Called on the network thread when video constraints change.
+ // TODO(crbug/1255737): make pure virtual once downstream project adapts.
+ virtual void OnConstraintsChanged(
+ const webrtc::VideoTrackSourceConstraints& constraints) {}
+};
+
+} // namespace rtc
+
+#endif // API_VIDEO_VIDEO_SINK_INTERFACE_H_
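
A minimal sketch of a sink implementation that simply counts delivered and discarded frames, assuming webrtc::VideoFrame as the frame type; the class is illustrative, not part of the API above.

    #include <atomic>

    #include "api/video/video_frame.h"
    #include "api/video/video_sink_interface.h"

    // Sketch only: a trivial sink that counts the frames it is handed.
    class FrameCountingSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     public:
      void OnFrame(const webrtc::VideoFrame& /*frame*/) override {
        delivered_.fetch_add(1, std::memory_order_relaxed);
      }
      void OnDiscardedFrame() override {
        discarded_.fetch_add(1, std::memory_order_relaxed);
      }

      int delivered() const { return delivered_.load(std::memory_order_relaxed); }
      int discarded() const { return discarded_.load(std::memory_order_relaxed); }

     private:
      std::atomic<int> delivered_{0};
      std::atomic<int> discarded_{0};
    };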
diff --git a/third_party/libwebrtc/api/video/video_source_interface.cc b/third_party/libwebrtc/api/video/video_source_interface.cc
new file mode 100644
index 0000000000..70a86c3d64
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_source_interface.cc
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_source_interface.h"
+
+namespace rtc {
+
+VideoSinkWants::VideoSinkWants() = default;
+VideoSinkWants::VideoSinkWants(const VideoSinkWants&) = default;
+VideoSinkWants::~VideoSinkWants() = default;
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/api/video/video_source_interface.h b/third_party/libwebrtc/api/video/video_source_interface.h
new file mode 100644
index 0000000000..38d0041718
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_source_interface.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
+#define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
+
+#include <limits>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video/video_sink_interface.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace rtc {
+
+// VideoSinkWants is used for notifying the source of properties a video frame
+// should have when it is delivered to a certain sink.
+struct RTC_EXPORT VideoSinkWants {
+ struct FrameSize {
+ FrameSize(int width, int height) : width(width), height(height) {}
+ FrameSize(const FrameSize&) = default;
+ ~FrameSize() = default;
+
+ int width;
+ int height;
+ };
+
+ VideoSinkWants();
+ VideoSinkWants(const VideoSinkWants&);
+ ~VideoSinkWants();
+ // Tells the source whether the sink wants frames with rotation applied.
+ // By default, any rotation must be applied by the sink.
+ bool rotation_applied = false;
+
+ // Tells the source that the sink only wants black frames.
+ bool black_frames = false;
+
+ // Tells the source the maximum number of pixels the sink wants.
+ int max_pixel_count = std::numeric_limits<int>::max();
+  // Tells the source the desired number of pixels the sink wants. This is
+  // typically used when stepping the resolution back up after an earlier
+  // downgrade, once conditions have improved. The source should select the
+  // resolution closest to this pixel count, but max_pixel_count, if set,
+  // still acts as the absolute upper bound.
+ absl::optional<int> target_pixel_count;
+ // Tells the source the maximum framerate the sink wants.
+ int max_framerate_fps = std::numeric_limits<int>::max();
+
+ // Tells the source that the sink wants width and height of the video frames
+ // to be divisible by `resolution_alignment`.
+ // For example: With I420, this value would be a multiple of 2.
+ // Note that this field is unrelated to any horizontal or vertical stride
+ // requirements the encoder has on the incoming video frame buffers.
+ int resolution_alignment = 1;
+
+  // The resolutions that the sink is configured to consume. If the sink is an
+ // encoder this is what the encoder is configured to encode. In singlecast we
+ // only encode one resolution, but in simulcast and SVC this can mean multiple
+ // resolutions per frame.
+ //
+ // The sink is always configured to consume a subset of the
+ // webrtc::VideoFrame's resolution. In the case of encoding, we usually encode
+ // at webrtc::VideoFrame's resolution but this may not always be the case due
+ // to scaleResolutionDownBy or turning off simulcast or SVC layers.
+ //
+ // For example, we may capture at 720p and due to adaptation (e.g. applying
+ // `max_pixel_count` constraints) create webrtc::VideoFrames of size 480p, but
+ // if we do scaleResolutionDownBy:2 then the only resolution we end up
+ // encoding is 240p. In this case we still need to provide webrtc::VideoFrames
+ // of size 480p but we can optimize internal buffers for 240p, avoiding
+ // downsampling to 480p if possible.
+ //
+ // Note that the `resolutions` can change while frames are in flight and
+ // should only be used as a hint when constructing the webrtc::VideoFrame.
+ std::vector<FrameSize> resolutions;
+
+ // This is the resolution requested by the user using RtpEncodingParameters.
+ absl::optional<FrameSize> requested_resolution;
+
+  // `is_active`: true if any of the layers/sinks is active.
+ bool is_active = true;
+
+ // This sub-struct contains information computed by VideoBroadcaster
+ // that aggregates several VideoSinkWants (and sends them to
+ // AdaptedVideoTrackSource).
+ struct Aggregates {
+    // `any_active_without_requested_resolution` is set by VideoBroadcaster
+    // when aggregating sink wants if there exists any sink (encoder) that is
+    // active but has not set `requested_resolution`, i.e. is relying on
+    // OnOutputFormatRequest to handle the encode resolution.
+ bool any_active_without_requested_resolution = false;
+ };
+ absl::optional<Aggregates> aggregates;
+};
+
+inline bool operator==(const VideoSinkWants::FrameSize& a,
+ const VideoSinkWants::FrameSize& b) {
+ return a.width == b.width && a.height == b.height;
+}
+
+inline bool operator!=(const VideoSinkWants::FrameSize& a,
+ const VideoSinkWants::FrameSize& b) {
+ return !(a == b);
+}
+
+template <typename VideoFrameT>
+class VideoSourceInterface {
+ public:
+ virtual ~VideoSourceInterface() = default;
+
+ virtual void AddOrUpdateSink(VideoSinkInterface<VideoFrameT>* sink,
+ const VideoSinkWants& wants) = 0;
+  // RemoveSink must guarantee that at the time the method returns,
+  // there are no current and no future calls to VideoSinkInterface::OnFrame.
+ virtual void RemoveSink(VideoSinkInterface<VideoFrameT>* sink) = 0;
+
+ // Request underlying source to capture a new frame.
+ // TODO(crbug/1255737): make pure virtual once downstream projects adapt.
+ virtual void RequestRefreshFrame() {}
+};
+
+} // namespace rtc
+#endif // API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
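
A minimal sketch of how a consumer might register a sink with a source, asking for at most 720p, preferring roughly 540p when stepping up, at no more than 15 fps; the limits chosen here are purely illustrative.

    #include "api/video/video_frame.h"
    #include "api/video/video_sink_interface.h"
    #include "api/video/video_source_interface.h"

    // Sketch only: attach a sink with explicit adaptation limits.
    void AttachLimitedSink(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                           rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
      rtc::VideoSinkWants wants;
      wants.max_pixel_count = 1280 * 720;    // Hard upper bound on resolution.
      wants.target_pixel_count = 960 * 540;  // Preferred size when stepping up.
      wants.max_framerate_fps = 15;
      wants.resolution_alignment = 2;        // E.g. for I420-based encoders.
      source->AddOrUpdateSink(sink, wants);
    }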
diff --git a/third_party/libwebrtc/api/video/video_stream_decoder.h b/third_party/libwebrtc/api/video/video_stream_decoder.h
new file mode 100644
index 0000000000..8d71dd300c
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_decoder.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_STREAM_DECODER_H_
+#define API_VIDEO_VIDEO_STREAM_DECODER_H_
+
+#include <map>
+#include <memory>
+#include <utility>
+
+#include "api/units/time_delta.h"
+#include "api/video/encoded_frame.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+// NOTE: This class is still under development and may change without notice.
+class VideoStreamDecoderInterface {
+ public:
+ class Callbacks {
+ public:
+ virtual ~Callbacks() = default;
+
+ struct FrameInfo {
+ absl::optional<int> qp;
+ VideoContentType content_type;
+ };
+
+ // Called when the VideoStreamDecoder enters a non-decodable state.
+ virtual void OnNonDecodableState() = 0;
+
+ virtual void OnContinuousUntil(int64_t frame_id) {}
+
+ virtual void OnDecodedFrame(VideoFrame frame,
+ const FrameInfo& frame_info) = 0;
+ };
+
+ virtual ~VideoStreamDecoderInterface() = default;
+
+ virtual void OnFrame(std::unique_ptr<EncodedFrame> frame) = 0;
+
+ virtual void SetMinPlayoutDelay(TimeDelta min_delay) = 0;
+ virtual void SetMaxPlayoutDelay(TimeDelta max_delay) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_STREAM_DECODER_H_
diff --git a/third_party/libwebrtc/api/video/video_stream_decoder_create.cc b/third_party/libwebrtc/api/video/video_stream_decoder_create.cc
new file mode 100644
index 0000000000..e14c3bc851
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_decoder_create.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_stream_decoder_create.h"
+
+#include <memory>
+
+#include "video/video_stream_decoder_impl.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoStreamDecoderInterface> CreateVideoStreamDecoder(
+ VideoStreamDecoderInterface::Callbacks* callbacks,
+ VideoDecoderFactory* decoder_factory,
+ TaskQueueFactory* task_queue_factory,
+ std::map<int, std::pair<SdpVideoFormat, int>> decoder_settings,
+ // TODO(jonaso, webrtc:10335): Consider what to do with factories
+ // vs. field trials.
+ const FieldTrialsView* field_trials) {
+ return std::make_unique<VideoStreamDecoderImpl>(
+ callbacks, decoder_factory, task_queue_factory,
+ std::move(decoder_settings), field_trials);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_stream_decoder_create.h b/third_party/libwebrtc/api/video/video_stream_decoder_create.h
new file mode 100644
index 0000000000..974fd804ce
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_decoder_create.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_
+#define API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_
+
+#include <map>
+#include <memory>
+#include <utility>
+
+#include "api/field_trials_view.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "api/video/video_stream_decoder.h"
+#include "api/video_codecs/sdp_video_format.h"
+
+namespace webrtc {
+// The `decoder_settings` parameter is a map from
+// <payload type> to <<video format>, <number of cores>>.
+// The video format is used when instantiating a decoder, and
+// the number of cores is used when initializing the decoder.
+std::unique_ptr<VideoStreamDecoderInterface> CreateVideoStreamDecoder(
+ VideoStreamDecoderInterface::Callbacks* callbacks,
+ VideoDecoderFactory* decoder_factory,
+ TaskQueueFactory* task_queue_factory,
+ std::map<int, std::pair<SdpVideoFormat, int>> decoder_settings,
+ const FieldTrialsView* field_trials = nullptr);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_
diff --git a/third_party/libwebrtc/api/video/video_stream_decoder_create_unittest.cc b/third_party/libwebrtc/api/video/video_stream_decoder_create_unittest.cc
new file mode 100644
index 0000000000..849a054a04
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_decoder_create_unittest.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_stream_decoder_create.h"
+
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+class NullCallbacks : public VideoStreamDecoderInterface::Callbacks {
+ public:
+ ~NullCallbacks() override = default;
+ void OnNonDecodableState() override {}
+ void OnDecodedFrame(VideoFrame frame,
+ const VideoStreamDecoderInterface::Callbacks::FrameInfo&
+ frame_info) override {}
+};
+
+TEST(VideoStreamDecoderCreate, CreateVideoStreamDecoder) {
+ std::map<int, std::pair<SdpVideoFormat, int>> decoder_settings = {
+ {/*payload_type=*/111, {SdpVideoFormat("VP8"), /*number_of_cores=*/2}}};
+ NullCallbacks callbacks;
+ std::unique_ptr<VideoDecoderFactory> decoder_factory =
+ CreateBuiltinVideoDecoderFactory();
+
+ std::unique_ptr<TaskQueueFactory> task_queue_factory =
+ CreateDefaultTaskQueueFactory();
+
+ std::unique_ptr<VideoStreamDecoderInterface> decoder =
+ CreateVideoStreamDecoder(&callbacks, decoder_factory.get(),
+ task_queue_factory.get(), decoder_settings);
+ EXPECT_TRUE(decoder);
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build b/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build
new file mode 100644
index 0000000000..e36bc0df98
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_stream_encoder_gn")
diff --git a/third_party/libwebrtc/api/video/video_stream_encoder_settings.h b/third_party/libwebrtc/api/video/video_stream_encoder_settings.h
new file mode 100644
index 0000000000..3aee5b7050
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_stream_encoder_settings.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
+#define API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
+
+#include <string>
+
+#include "api/video/video_bitrate_allocator_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+
+namespace webrtc {
+
+class EncoderSwitchRequestCallback {
+ public:
+ virtual ~EncoderSwitchRequestCallback() {}
+
+  // Requests a switch to the next negotiated encoder.
+ virtual void RequestEncoderFallback() = 0;
+
+ // Requests switch to a specific encoder. If the encoder is not available and
+ // `allow_default_fallback` is `true` the default fallback is invoked.
+ virtual void RequestEncoderSwitch(const SdpVideoFormat& format,
+ bool allow_default_fallback) = 0;
+};
+
+struct VideoStreamEncoderSettings {
+ explicit VideoStreamEncoderSettings(
+ const VideoEncoder::Capabilities& capabilities)
+ : capabilities(capabilities) {}
+
+  // Enables the new method to estimate the CPU load from encoding, used for
+  // CPU adaptation.
+ bool experiment_cpu_load_estimator = false;
+
+ // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection).
+ VideoEncoderFactory* encoder_factory = nullptr;
+
+ // Requests the WebRtcVideoChannel to perform a codec switch.
+ EncoderSwitchRequestCallback* encoder_switch_request_callback = nullptr;
+
+ // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection).
+ VideoBitrateAllocatorFactory* bitrate_allocator_factory = nullptr;
+
+ // Negotiated capabilities which the VideoEncoder may expect the other
+ // side to use.
+ VideoEncoder::Capabilities capabilities;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
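
A minimal sketch of wiring up VideoStreamEncoderSettings; the factory pointers are assumed to be owned elsewhere (as the comments above require), and the Capabilities bool constructor and chosen flag values are illustrative assumptions rather than anything mandated by this header.

    #include "api/video/video_bitrate_allocator_factory.h"
    #include "api/video/video_stream_encoder_settings.h"
    #include "api/video_codecs/video_encoder.h"
    #include "api/video_codecs/video_encoder_factory.h"

    // Sketch only: assemble encoder settings from externally owned factories.
    webrtc::VideoStreamEncoderSettings MakeEncoderSettings(
        webrtc::VideoEncoderFactory* encoder_factory,
        webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory) {
      // Capabilities advertise whether the remote side supports the
      // LossNotification RTCP feedback message (assumed unsupported here).
      webrtc::VideoEncoder::Capabilities capabilities(/*loss_notification=*/false);
      webrtc::VideoStreamEncoderSettings settings(capabilities);
      settings.encoder_factory = encoder_factory;
      settings.bitrate_allocator_factory = bitrate_allocator_factory;
      settings.experiment_cpu_load_estimator = true;
      return settings;
    }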
diff --git a/third_party/libwebrtc/api/video/video_timing.cc b/third_party/libwebrtc/api/video/video_timing.cc
new file mode 100644
index 0000000000..0483c20e66
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_timing.cc
@@ -0,0 +1,101 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/video_timing.h"
+
+#include "api/array_view.h"
+#include "api/units/time_delta.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) {
+ if (time_ms < base_ms) {
+ RTC_DLOG(LS_ERROR) << "Delta " << (time_ms - base_ms)
+ << "ms expected to be positive";
+ }
+ return rtc::saturated_cast<uint16_t>(time_ms - base_ms);
+}
+
+uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) {
+ if (delta < TimeDelta::Zero()) {
+ RTC_DLOG(LS_ERROR) << "Delta " << delta.ms()
+ << "ms expected to be positive";
+ }
+ return rtc::saturated_cast<uint16_t>(delta.ms());
+}
+
+TimingFrameInfo::TimingFrameInfo()
+ : rtp_timestamp(0),
+ capture_time_ms(-1),
+ encode_start_ms(-1),
+ encode_finish_ms(-1),
+ packetization_finish_ms(-1),
+ pacer_exit_ms(-1),
+ network_timestamp_ms(-1),
+ network2_timestamp_ms(-1),
+ receive_start_ms(-1),
+ receive_finish_ms(-1),
+ decode_start_ms(-1),
+ decode_finish_ms(-1),
+ render_time_ms(-1),
+ flags(VideoSendTiming::kNotTriggered) {}
+
+int64_t TimingFrameInfo::EndToEndDelay() const {
+ return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
+}
+
+bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
+ int64_t other_delay = other.EndToEndDelay();
+ return other_delay == -1 || EndToEndDelay() > other_delay;
+}
+
+bool TimingFrameInfo::operator<(const TimingFrameInfo& other) const {
+ return other.IsLongerThan(*this);
+}
+
+bool TimingFrameInfo::operator<=(const TimingFrameInfo& other) const {
+ return !IsLongerThan(other);
+}
+
+bool TimingFrameInfo::IsOutlier() const {
+ return !IsInvalid() && (flags & VideoSendTiming::kTriggeredBySize);
+}
+
+bool TimingFrameInfo::IsTimerTriggered() const {
+ return !IsInvalid() && (flags & VideoSendTiming::kTriggeredByTimer);
+}
+
+bool TimingFrameInfo::IsInvalid() const {
+ return flags == VideoSendTiming::kInvalid;
+}
+
+std::string TimingFrameInfo::ToString() const {
+ if (IsInvalid()) {
+ return "";
+ }
+
+ char buf[1024];
+ rtc::SimpleStringBuilder sb(buf);
+
+ sb << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms << ','
+ << encode_finish_ms << ',' << packetization_finish_ms << ','
+ << pacer_exit_ms << ',' << network_timestamp_ms << ','
+ << network2_timestamp_ms << ',' << receive_start_ms << ','
+ << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
+ << ',' << render_time_ms << ',' << IsOutlier() << ','
+ << IsTimerTriggered();
+
+ return sb.str();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video/video_timing.h b/third_party/libwebrtc/api/video/video_timing.h
new file mode 100644
index 0000000000..698477a81a
--- /dev/null
+++ b/third_party/libwebrtc/api/video/video_timing.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_VIDEO_TIMING_H_
+#define API_VIDEO_VIDEO_TIMING_H_
+
+#include <stdint.h>
+
+#include <limits>
+#include <string>
+
+#include "api/units/time_delta.h"
+
+namespace webrtc {
+
+// Video timing timestamps in ms counted from capture_time_ms of a frame.
+// This structure represents data sent in video-timing RTP header extension.
+struct VideoSendTiming {
+ enum TimingFrameFlags : uint8_t {
+ kNotTriggered = 0, // Timing info valid, but not to be transmitted.
+ // Used on send-side only.
+ kTriggeredByTimer = 1 << 0, // Frame marked for tracing by periodic timer.
+ kTriggeredBySize = 1 << 1, // Frame marked for tracing due to size.
+ kInvalid = std::numeric_limits<uint8_t>::max() // Invalid, ignore!
+ };
+
+  // Returns |time_ms - base_ms|, capped at the max 16-bit value.
+  // Used to fill this data structure as per the
+  // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension, which
+  // stores 16-bit deltas of timestamps from packet capture time.
+ static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms);
+ static uint16_t GetDeltaCappedMs(TimeDelta delta);
+
+ uint16_t encode_start_delta_ms;
+ uint16_t encode_finish_delta_ms;
+ uint16_t packetization_finish_delta_ms;
+ uint16_t pacer_exit_delta_ms;
+ uint16_t network_timestamp_delta_ms;
+ uint16_t network2_timestamp_delta_ms;
+ uint8_t flags = TimingFrameFlags::kInvalid;
+};
+
+// Used to report precise timings of 'timing frames'. Contains all important
+// timestamps for the lifetime of that specific frame. Reported as a string via
+// GetStats(). Only the frame that took the longest between two GetStats()
+// calls is reported.
+struct TimingFrameInfo {
+ TimingFrameInfo();
+
+ // Returns end-to-end delay of a frame, if sender and receiver timestamps are
+ // synchronized, -1 otherwise.
+ int64_t EndToEndDelay() const;
+
+  // Returns true if the current frame took longer to process than the `other`
+  // frame. If the other frame's clocks are not synchronized, the current frame
+  // is always preferred.
+ bool IsLongerThan(const TimingFrameInfo& other) const;
+
+ // Returns true if flags are set to indicate this frame was marked for tracing
+ // due to the size being outside some limit.
+ bool IsOutlier() const;
+
+  // Returns true if flags are set to indicate this frame was marked for
+  // tracing due to the periodic timer.
+ bool IsTimerTriggered() const;
+
+ // Returns true if the timing data is marked as invalid, in which case it
+ // should be ignored.
+ bool IsInvalid() const;
+
+ std::string ToString() const;
+
+ bool operator<(const TimingFrameInfo& other) const;
+
+ bool operator<=(const TimingFrameInfo& other) const;
+
+ uint32_t rtp_timestamp; // Identifier of a frame.
+  // All timestamps below are in the local monotonic clock of the receiver.
+  // If the sender clock is not yet estimated, the sender timestamps
+  // (capture_time_ms ... pacer_exit_ms) are negative values, but still
+  // relatively correct.
+  int64_t capture_time_ms;  // Capture time of a frame.
+ int64_t encode_start_ms; // Encode start time.
+ int64_t encode_finish_ms; // Encode completion time.
+ int64_t packetization_finish_ms; // Time when frame was passed to pacer.
+ int64_t pacer_exit_ms; // Time when last packet was pushed out of pacer.
+ // Two in-network RTP processor timestamps: meaning is application specific.
+ int64_t network_timestamp_ms;
+ int64_t network2_timestamp_ms;
+ int64_t receive_start_ms; // First received packet time.
+ int64_t receive_finish_ms; // Last received packet time.
+ int64_t decode_start_ms; // Decode start time.
+ int64_t decode_finish_ms; // Decode completion time.
+  int64_t render_time_ms;  // Proposed render time to ensure smooth playback.
+
+ uint8_t flags; // Flags indicating validity and/or why tracing was triggered.
+};
+
+// Minimum and maximum playout delay values from capture to render.
+// These are best effort values.
+//
+// A value < 0 indicates no change from previous valid value.
+//
+// min = max = 0 indicates that the receiver should try to render the
+// frame as soon as possible.
+//
+// min = x, max = y indicates that the receiver is free to adapt
+// in the range (x, y) based on network jitter.
+struct VideoPlayoutDelay {
+ VideoPlayoutDelay() = default;
+ VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {}
+ int min_ms = -1;
+ int max_ms = -1;
+
+ bool operator==(const VideoPlayoutDelay& rhs) const {
+ return min_ms == rhs.min_ms && max_ms == rhs.max_ms;
+ }
+};
+
+// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated.
+using PlayoutDelay = VideoPlayoutDelay;
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_TIMING_H_
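
A minimal sketch of how the capped-delta helpers above are typically used on the send side to fill in deltas relative to capture time; the timestamps are parameters here and the choice of flag is illustrative.

    #include <cstdint>

    #include "api/video/video_timing.h"

    // Sketch only: populate the send-side deltas relative to capture time.
    webrtc::VideoSendTiming BuildSendTiming(int64_t capture_time_ms,
                                            int64_t encode_start_ms,
                                            int64_t encode_finish_ms,
                                            int64_t pacer_exit_ms) {
      webrtc::VideoSendTiming timing = {};  // Zero the deltas not filled below.
      timing.encode_start_delta_ms = webrtc::VideoSendTiming::GetDeltaCappedMs(
          capture_time_ms, encode_start_ms);
      timing.encode_finish_delta_ms = webrtc::VideoSendTiming::GetDeltaCappedMs(
          capture_time_ms, encode_finish_ms);
      timing.pacer_exit_delta_ms = webrtc::VideoSendTiming::GetDeltaCappedMs(
          capture_time_ms, pacer_exit_ms);
      // Mark the frame as traced by the periodic timer so the receiver reports it.
      timing.flags = webrtc::VideoSendTiming::kTriggeredByTimer;
      return timing;
    }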
diff --git a/third_party/libwebrtc/api/video_codecs/BUILD.gn b/third_party/libwebrtc/api/video_codecs/BUILD.gn
new file mode 100644
index 0000000000..101848a2e4
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/BUILD.gn
@@ -0,0 +1,314 @@
+# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+if (is_android) {
+ import("//build/config/android/config.gni")
+ import("//build/config/android/rules.gni")
+}
+
+rtc_source_set("scalability_mode") {
+ visibility = [ "*" ]
+ sources = [
+ "scalability_mode.cc",
+ "scalability_mode.h",
+ ]
+ deps = [
+ "../../rtc_base:checks",
+ "../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
+}
+
+rtc_source_set("scalability_mode_helper") {
+ visibility = [ "*" ]
+ sources = [
+ "scalability_mode_helper.cc",
+ "scalability_mode_helper.h",
+ ]
+ deps = [ "../../modules/video_coding/svc:scalability_mode_util" ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_library("video_codecs_api") {
+ visibility = [ "*" ]
+ sources = [
+ "av1_profile.cc",
+ "av1_profile.h",
+ "h264_profile_level_id.cc",
+ "h264_profile_level_id.h",
+ "sdp_video_format.cc",
+ "sdp_video_format.h",
+ "simulcast_stream.cc",
+ "simulcast_stream.h",
+ "spatial_layer.cc",
+ "spatial_layer.h",
+ "video_codec.cc",
+ "video_codec.h",
+ "video_decoder.cc",
+ "video_decoder.h",
+ "video_decoder_factory.h",
+ "video_encoder.cc",
+ "video_encoder.h",
+ "video_encoder_factory.h",
+ "vp8_frame_buffer_controller.h",
+ "vp8_frame_config.cc",
+ "vp8_frame_config.h",
+ "vp8_temporal_layers.cc",
+ "vp8_temporal_layers.h",
+ "vp9_profile.cc",
+ "vp9_profile.h",
+ ]
+
+ deps = [
+ ":scalability_mode",
+ "..:fec_controller_api",
+ "..:scoped_refptr",
+ "../../api:array_view",
+ "../../modules/video_coding:codec_globals_headers",
+ "../../rtc_base:checks",
+ "../../rtc_base:logging",
+ "../../rtc_base:macromagic",
+ "../../rtc_base:refcount",
+ "../../rtc_base:stringutils",
+ "../../rtc_base/system:rtc_export",
+ "../units:data_rate",
+ "../video:encoded_image",
+ "../video:render_resolution",
+ "../video:resolution",
+ "../video:video_bitrate_allocation",
+ "../video:video_codec_constants",
+ "../video:video_frame",
+ "../video:video_rtp_headers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/algorithm:container",
+ "//third_party/abseil-cpp/absl/container:inlined_vector",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("bitstream_parser_api") {
+ visibility = [ "*" ]
+ sources = [ "bitstream_parser.h" ]
+ deps = [ "..:array_view" ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+}
+
+rtc_library("builtin_video_decoder_factory") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "software_video_codecs",
+ ]
+ sources = [
+ "builtin_video_decoder_factory.cc",
+ "builtin_video_decoder_factory.h",
+ ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../api:scoped_refptr",
+ "../../media:rtc_internal_video_codecs",
+ "../../rtc_base/system:rtc_export",
+ ]
+}
+
+rtc_library("builtin_video_encoder_factory") {
+ visibility = [ "*" ]
+ allow_poison = [
+ "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove.
+ "software_video_codecs",
+ ]
+ sources = [
+ "builtin_video_encoder_factory.cc",
+ "builtin_video_encoder_factory.h",
+ ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../api:scoped_refptr",
+ "../../media:codec",
+ "../../media:media_constants",
+ "../../media:rtc_encoder_simulcast_proxy",
+ "../../media:rtc_internal_video_codecs",
+ "../../media:rtc_media_base",
+ "../../rtc_base:checks",
+ "../../rtc_base/system:rtc_export",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
+
+rtc_source_set("video_encoder_factory_template") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_encoder_factory_template.h" ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../api:array_view",
+ "../../modules/video_coding/svc:scalability_mode_util",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
+}
+
+rtc_source_set("video_encoder_factory_template_libvpx_vp8_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_encoder_factory_template_libvpx_vp8_adapter.h" ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../modules/video_coding:webrtc_vp8",
+ "../../modules/video_coding:webrtc_vp8_scalability",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ]
+}
+
+rtc_source_set("video_encoder_factory_template_libvpx_vp9_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_encoder_factory_template_libvpx_vp9_adapter.h" ]
+
+ deps = [ "../../modules/video_coding:webrtc_vp9" ]
+}
+
+rtc_source_set("video_encoder_factory_template_open_h264_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_encoder_factory_template_open_h264_adapter.h" ]
+
+ deps = [ "../../modules/video_coding:webrtc_h264" ]
+}
+
+rtc_source_set("video_encoder_factory_template_libaom_av1_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_encoder_factory_template_libaom_av1_adapter.h" ]
+
+ deps = [
+ ":scalability_mode",
+ ":video_codecs_api",
+ "../../modules/video_coding/codecs/av1:av1_svc_config",
+ "../../modules/video_coding/codecs/av1:libaom_av1_encoder",
+ "../../modules/video_coding/svc:scalability_mode_util",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ]
+}
+
+rtc_source_set("video_decoder_factory_template") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_decoder_factory_template.h" ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../api:array_view",
+ ]
+
+ absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ]
+}
+
+rtc_source_set("video_decoder_factory_template_libvpx_vp8_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_decoder_factory_template_libvpx_vp8_adapter.h" ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../modules/video_coding:webrtc_vp8",
+ ]
+}
+
+rtc_source_set("video_decoder_factory_template_libvpx_vp9_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_decoder_factory_template_libvpx_vp9_adapter.h" ]
+
+ deps = [ "../../modules/video_coding:webrtc_vp9" ]
+}
+
+rtc_source_set("video_decoder_factory_template_open_h264_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_decoder_factory_template_open_h264_adapter.h" ]
+
+ deps = [ "../../modules/video_coding:webrtc_h264" ]
+}
+
+rtc_source_set("video_decoder_factory_template_dav1d_adapter") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ public = [ "video_decoder_factory_template_dav1d_adapter.h" ]
+
+ deps = [
+ ":video_codecs_api",
+ "../../modules/video_coding/codecs/av1:dav1d_decoder",
+ ]
+}
+
+rtc_library("vp8_temporal_layers_factory") {
+ visibility = [ "*" ]
+ allow_poison = [ "software_video_codecs" ]
+ sources = [
+ "vp8_temporal_layers_factory.cc",
+ "vp8_temporal_layers_factory.h",
+ ]
+
+ deps = [
+ ":video_codecs_api",
+ "../:fec_controller_api",
+ "../../modules/video_coding:video_coding_utility",
+ "../../modules/video_coding:webrtc_vp8_temporal_layers",
+ "../../rtc_base:checks",
+ ]
+}
+
+rtc_library("rtc_software_fallback_wrappers") {
+ visibility = [ "*" ]
+
+ sources = [
+ "video_decoder_software_fallback_wrapper.cc",
+ "video_decoder_software_fallback_wrapper.h",
+ "video_encoder_software_fallback_wrapper.cc",
+ "video_encoder_software_fallback_wrapper.h",
+ ]
+
+ deps = [
+ ":video_codecs_api",
+ "..:fec_controller_api",
+ "../../api/video:video_frame",
+ "../../media:rtc_media_base",
+ "../../modules/video_coding:video_codec_interface",
+ "../../modules/video_coding:video_coding_utility",
+ "../../rtc_base:checks",
+ "../../rtc_base:event_tracer",
+ "../../rtc_base:logging",
+ "../../rtc_base/system:rtc_export",
+ "../../system_wrappers:field_trial",
+ "../../system_wrappers:metrics",
+ "../video:encoded_image",
+ "../video:video_bitrate_allocation",
+ "../video:video_frame",
+ "../video:video_rtp_headers",
+ ]
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/base:core_headers",
+ "//third_party/abseil-cpp/absl/strings",
+ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+}
diff --git a/third_party/libwebrtc/api/video_codecs/OWNERS b/third_party/libwebrtc/api/video_codecs/OWNERS
new file mode 100644
index 0000000000..f73b04f829
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/OWNERS
@@ -0,0 +1,4 @@
+magjed@webrtc.org
+sprang@webrtc.org
+brandtr@webrtc.org
+philipel@webrtc.org
diff --git a/third_party/libwebrtc/api/video_codecs/av1_profile.cc b/third_party/libwebrtc/api/video_codecs/av1_profile.cc
new file mode 100644
index 0000000000..eefe166d80
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/av1_profile.cc
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/av1_profile.h"
+
+#include <map>
+#include <utility>
+
+#include "rtc_base/string_to_number.h"
+
+namespace webrtc {
+
+// Parameter name in the format parameter map for AV1 video.
+const char kAV1FmtpProfile[] = "profile";
+
+absl::string_view AV1ProfileToString(AV1Profile profile) {
+ switch (profile) {
+ case AV1Profile::kProfile0:
+ return "0";
+ case AV1Profile::kProfile1:
+ return "1";
+ case AV1Profile::kProfile2:
+ return "2";
+ }
+ return "0";
+}
+
+absl::optional<AV1Profile> StringToAV1Profile(absl::string_view str) {
+ const absl::optional<int> i = rtc::StringToNumber<int>(str);
+ if (!i.has_value())
+ return absl::nullopt;
+
+ switch (i.value()) {
+ case 0:
+ return AV1Profile::kProfile0;
+ case 1:
+ return AV1Profile::kProfile1;
+ case 2:
+ return AV1Profile::kProfile2;
+ default:
+ return absl::nullopt;
+ }
+}
+
+absl::optional<AV1Profile> ParseSdpForAV1Profile(
+ const SdpVideoFormat::Parameters& params) {
+ const auto profile_it = params.find(kAV1FmtpProfile);
+ if (profile_it == params.end())
+ return AV1Profile::kProfile0;
+ const std::string& profile_str = profile_it->second;
+ return StringToAV1Profile(profile_str);
+}
+
+bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2) {
+ const absl::optional<AV1Profile> profile = ParseSdpForAV1Profile(params1);
+ const absl::optional<AV1Profile> other_profile =
+ ParseSdpForAV1Profile(params2);
+ return profile && other_profile && profile == other_profile;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/av1_profile.h b/third_party/libwebrtc/api/video_codecs/av1_profile.h
new file mode 100644
index 0000000000..2254d5ecd3
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/av1_profile.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_AV1_PROFILE_H_
+#define API_VIDEO_CODECS_AV1_PROFILE_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Profile information for AV1 video.
+extern RTC_EXPORT const char kAV1FmtpProfile[];
+
+// Profiles can be found at:
+// https://aomedia.org/av1/specification/annex-a/#profiles
+// The enum values match the number specified in the SDP.
+enum class AV1Profile {
+ kProfile0 = 0,
+ kProfile1 = 1,
+ kProfile2 = 2,
+};
+
+// Helper function which converts an AV1Profile to a string. Returns "0" if
+// an unknown value is passed in.
+RTC_EXPORT absl::string_view AV1ProfileToString(AV1Profile profile);
+
+// Helper function which converts a string to an AV1Profile. Returns
+// absl::nullopt if `profile` is not a valid profile string.
+absl::optional<AV1Profile> StringToAV1Profile(absl::string_view profile);
+
+// Parses an SDP key-value map of format parameters to retrieve an AV1 profile.
+// Returns an AV1Profile if one has been specified, `kProfile0` if no profile is
+// specified and an empty value if the profile key is present but contains an
+// invalid value.
+RTC_EXPORT absl::optional<AV1Profile> ParseSdpForAV1Profile(
+ const SdpVideoFormat::Parameters& params);
+
+// Returns true if the parameters have the same AV1 profile or neither contains
+// an AV1 profile, otherwise false.
+bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_AV1_PROFILE_H_
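A short usage sketch of the AV1 profile helpers declared above; the parameter map contents are made up for illustration.

#include "api/video_codecs/av1_profile.h"

void Av1ProfileExample() {
  // "profile" is the fmtp key (kAV1FmtpProfile); "2" selects AV1Profile::kProfile2.
  webrtc::SdpVideoFormat::Parameters params = {{"profile", "2"}};
  absl::optional<webrtc::AV1Profile> profile =
      webrtc::ParseSdpForAV1Profile(params);
  // An absent "profile" key would yield kProfile0; an invalid value such as
  // "9" would yield absl::nullopt.
  if (profile.has_value()) {
    absl::string_view name = webrtc::AV1ProfileToString(*profile);  // "2"
    (void)name;
  }
}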
diff --git a/third_party/libwebrtc/api/video_codecs/bitstream_parser.h b/third_party/libwebrtc/api/video_codecs/bitstream_parser.h
new file mode 100644
index 0000000000..86ce192e49
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/bitstream_parser.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_BITSTREAM_PARSER_H_
+#define API_VIDEO_CODECS_BITSTREAM_PARSER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+
+namespace webrtc {
+
+// This class is an interface for bitstream parsers.
+class BitstreamParser {
+ public:
+ virtual ~BitstreamParser() = default;
+
+ // Parse an additional chunk of the bitstream.
+ virtual void ParseBitstream(rtc::ArrayView<const uint8_t> bitstream) = 0;
+
+ // Get the last extracted QP value from the parsed bitstream. If no QP
+ // value could be parsed, returns absl::nullopt.
+ virtual absl::optional<int> GetLastSliceQp() const = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_BITSTREAM_PARSER_H_
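A minimal sketch of an implementation of this interface; the parsing itself is stubbed out and only the shape of the contract is shown.

#include "api/video_codecs/bitstream_parser.h"

namespace {

// Illustrative stub: a real parser would walk NAL units / OBUs in
// ParseBitstream() and remember the QP of the last slice it saw.
class StubBitstreamParser : public webrtc::BitstreamParser {
 public:
  void ParseBitstream(rtc::ArrayView<const uint8_t> bitstream) override {
    last_size_ = bitstream.size();
  }
  absl::optional<int> GetLastSliceQp() const override {
    return absl::nullopt;  // Nothing extracted by this stub.
  }

 private:
  size_t last_size_ = 0;
};

}  // namespace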
diff --git a/third_party/libwebrtc/api/video_codecs/bitstream_parser_api_gn/moz.build b/third_party/libwebrtc/api/video_codecs/bitstream_parser_api_gn/moz.build
new file mode 100644
index 0000000000..dcf5d2273c
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/bitstream_parser_api_gn/moz.build
@@ -0,0 +1,205 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("bitstream_parser_api_gn")
diff --git a/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.cc b/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.cc
new file mode 100644
index 0000000000..f831905189
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+
+#include <memory>
+
+#include "media/engine/internal_decoder_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr<VideoDecoderFactory> CreateBuiltinVideoDecoderFactory() {
+ return std::make_unique<InternalDecoderFactory>();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.h b/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.h
new file mode 100644
index 0000000000..d516077d99
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/builtin_video_decoder_factory.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_BUILTIN_VIDEO_DECODER_FACTORY_H_
+#define API_VIDEO_CODECS_BUILTIN_VIDEO_DECODER_FACTORY_H_
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Creates a new factory that can create the built-in types of video decoders.
+RTC_EXPORT std::unique_ptr<VideoDecoderFactory>
+CreateBuiltinVideoDecoderFactory();
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_BUILTIN_VIDEO_DECODER_FACTORY_H_
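Typical use is to create this factory once and either hand it to the peer connection factory or query it directly. A minimal sketch, assuming VideoDecoderFactory exposes GetSupportedFormats() like the encoder-side factory shown later in this patch:

#include <memory>
#include <vector>

#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/sdp_video_format.h"

std::vector<webrtc::SdpVideoFormat> ListBuiltinDecoderFormats() {
  std::unique_ptr<webrtc::VideoDecoderFactory> factory =
      webrtc::CreateBuiltinVideoDecoderFactory();
  // Assumption: GetSupportedFormats() is part of the VideoDecoderFactory
  // interface (declared in video_decoder_factory.h, not shown in this patch).
  return factory->GetSupportedFormats();
}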
diff --git a/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.cc b/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.cc
new file mode 100644
index 0000000000..7c5f35b216
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/codec.h"
+#include "media/base/media_constants.h"
+#include "media/engine/encoder_simulcast_proxy.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace {
+
+// This class wraps the internal factory and adds simulcast.
+class BuiltinVideoEncoderFactory : public VideoEncoderFactory {
+ public:
+ BuiltinVideoEncoderFactory()
+ : internal_encoder_factory_(new InternalEncoderFactory()) {}
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override {
+ // Try creating internal encoder.
+ std::unique_ptr<VideoEncoder> internal_encoder;
+ if (format.IsCodecInList(
+ internal_encoder_factory_->GetSupportedFormats())) {
+ internal_encoder = std::make_unique<EncoderSimulcastProxy>(
+ internal_encoder_factory_.get(), format);
+ }
+
+ return internal_encoder;
+ }
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ return internal_encoder_factory_->GetSupportedFormats();
+ }
+
+ CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const override {
+ return internal_encoder_factory_->QueryCodecSupport(format,
+ scalability_mode);
+ }
+
+ private:
+ const std::unique_ptr<VideoEncoderFactory> internal_encoder_factory_;
+};
+
+} // namespace
+
+std::unique_ptr<VideoEncoderFactory> CreateBuiltinVideoEncoderFactory() {
+ return std::make_unique<BuiltinVideoEncoderFactory>();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.h b/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.h
new file mode 100644
index 0000000000..2c4537205c
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/builtin_video_encoder_factory.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_BUILTIN_VIDEO_ENCODER_FACTORY_H_
+#define API_VIDEO_CODECS_BUILTIN_VIDEO_ENCODER_FACTORY_H_
+
+#include <memory>
+
+#include "api/video_codecs/video_encoder_factory.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Creates a new factory that can create the built-in types of video encoders.
+// The factory has simulcast support for VP8.
+RTC_EXPORT std::unique_ptr<VideoEncoderFactory>
+CreateBuiltinVideoEncoderFactory();
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_BUILTIN_VIDEO_ENCODER_FACTORY_H_
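A matching sketch for the encoder side: CreateVideoEncoder() returns nullptr for unsupported formats and otherwise wraps the internal encoder in EncoderSimulcastProxy, as shown in builtin_video_encoder_factory.cc above. The name-only SdpVideoFormat constructor is assumed here.

#include <memory>

#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder.h"

std::unique_ptr<webrtc::VideoEncoder> CreateVp8EncoderSketch() {
  std::unique_ptr<webrtc::VideoEncoderFactory> factory =
      webrtc::CreateBuiltinVideoEncoderFactory();
  // Returns nullptr if "VP8" is not among factory->GetSupportedFormats().
  return factory->CreateVideoEncoder(webrtc::SdpVideoFormat("VP8"));
}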
diff --git a/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc
new file mode 100644
index 0000000000..02b43ba4f2
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc
@@ -0,0 +1,256 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/h264_profile_level_id.h"
+
+#include <cstdio>
+#include <cstdlib>
+#include <string>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace {
+
+const char kProfileLevelId[] = "profile-level-id";
+
+// For level_idc=11 and profile_idc=0x42, 0x4D, or 0x58, the constraint set3
+// flag specifies if level 1b or level 1.1 is used.
+const uint8_t kConstraintSet3Flag = 0x10;
+
+// Convert a string of 8 characters into a byte where the positions containing
+// character c will have their bit set. For example, c = 'x', str = "x1xx0000"
+// will return 0b10110000. constexpr is used so that the pattern table in
+// kProfilePatterns is statically initialized.
+constexpr uint8_t ByteMaskString(char c, const char (&str)[9]) {
+ return (str[0] == c) << 7 | (str[1] == c) << 6 | (str[2] == c) << 5 |
+ (str[3] == c) << 4 | (str[4] == c) << 3 | (str[5] == c) << 2 |
+ (str[6] == c) << 1 | (str[7] == c) << 0;
+}
+
+// Class for matching bit patterns such as "x1xx0000" where 'x' is allowed to be
+// either 0 or 1.
+class BitPattern {
+ public:
+ explicit constexpr BitPattern(const char (&str)[9])
+ : mask_(~ByteMaskString('x', str)),
+ masked_value_(ByteMaskString('1', str)) {}
+
+ bool IsMatch(uint8_t value) const { return masked_value_ == (value & mask_); }
+
+ private:
+ const uint8_t mask_;
+ const uint8_t masked_value_;
+};
+
+// Table for converting between profile_idc/profile_iop to H264Profile.
+struct ProfilePattern {
+ const uint8_t profile_idc;
+ const BitPattern profile_iop;
+ const H264Profile profile;
+};
+
+// This is from https://tools.ietf.org/html/rfc6184#section-8.1.
+constexpr ProfilePattern kProfilePatterns[] = {
+ {0x42, BitPattern("x1xx0000"), H264Profile::kProfileConstrainedBaseline},
+ {0x4D, BitPattern("1xxx0000"), H264Profile::kProfileConstrainedBaseline},
+ {0x58, BitPattern("11xx0000"), H264Profile::kProfileConstrainedBaseline},
+ {0x42, BitPattern("x0xx0000"), H264Profile::kProfileBaseline},
+ {0x58, BitPattern("10xx0000"), H264Profile::kProfileBaseline},
+ {0x4D, BitPattern("0x0x0000"), H264Profile::kProfileMain},
+ {0x64, BitPattern("00000000"), H264Profile::kProfileHigh},
+ {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh},
+ {0xF4, BitPattern("00000000"), H264Profile::kProfilePredictiveHigh444}};
+
+struct LevelConstraint {
+ const int max_macroblocks_per_second;
+ const int max_macroblock_frame_size;
+ const H264Level level;
+};
+
+// This is from ITU-T H.264 (02/2016) Table A-1 – Level limits.
+static constexpr LevelConstraint kLevelConstraints[] = {
+ {1485, 99, H264Level::kLevel1},
+ {1485, 99, H264Level::kLevel1_b},
+ {3000, 396, H264Level::kLevel1_1},
+ {6000, 396, H264Level::kLevel1_2},
+ {11880, 396, H264Level::kLevel1_3},
+ {11880, 396, H264Level::kLevel2},
+ {19800, 792, H264Level::kLevel2_1},
+ {20250, 1620, H264Level::kLevel2_2},
+ {40500, 1620, H264Level::kLevel3},
+ {108000, 3600, H264Level::kLevel3_1},
+ {216000, 5120, H264Level::kLevel3_2},
+ {245760, 8192, H264Level::kLevel4},
+ {245760, 8192, H264Level::kLevel4_1},
+ {522240, 8704, H264Level::kLevel4_2},
+ {589824, 22080, H264Level::kLevel5},
+ {983040, 36864, H264Level::kLevel5_1},
+ {2073600, 36864, H264Level::kLevel5_2},
+};
+
+} // anonymous namespace
+
+absl::optional<H264ProfileLevelId> ParseH264ProfileLevelId(const char* str) {
+ // The string should consist of 3 bytes in hexadecimal format.
+ if (strlen(str) != 6u)
+ return absl::nullopt;
+ const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16);
+ if (profile_level_id_numeric == 0)
+ return absl::nullopt;
+
+ // Separate into three bytes.
+ const uint8_t level_idc =
+ static_cast<uint8_t>(profile_level_id_numeric & 0xFF);
+ const uint8_t profile_iop =
+ static_cast<uint8_t>((profile_level_id_numeric >> 8) & 0xFF);
+ const uint8_t profile_idc =
+ static_cast<uint8_t>((profile_level_id_numeric >> 16) & 0xFF);
+
+ // Parse level based on level_idc and constraint set 3 flag.
+ H264Level level_casted = static_cast<H264Level>(level_idc);
+ H264Level level;
+
+ switch (level_casted) {
+ case H264Level::kLevel1_1:
+ level = (profile_iop & kConstraintSet3Flag) != 0 ? H264Level::kLevel1_b
+ : H264Level::kLevel1_1;
+ break;
+ case H264Level::kLevel1:
+ case H264Level::kLevel1_2:
+ case H264Level::kLevel1_3:
+ case H264Level::kLevel2:
+ case H264Level::kLevel2_1:
+ case H264Level::kLevel2_2:
+ case H264Level::kLevel3:
+ case H264Level::kLevel3_1:
+ case H264Level::kLevel3_2:
+ case H264Level::kLevel4:
+ case H264Level::kLevel4_1:
+ case H264Level::kLevel4_2:
+ case H264Level::kLevel5:
+ case H264Level::kLevel5_1:
+ case H264Level::kLevel5_2:
+ level = level_casted;
+ break;
+ default:
+ // Unrecognized level_idc.
+ return absl::nullopt;
+ }
+
+ // Parse profile_idc/profile_iop into a Profile enum.
+ for (const ProfilePattern& pattern : kProfilePatterns) {
+ if (profile_idc == pattern.profile_idc &&
+ pattern.profile_iop.IsMatch(profile_iop)) {
+ return H264ProfileLevelId(pattern.profile, level);
+ }
+ }
+
+ // Unrecognized profile_idc/profile_iop combination.
+ return absl::nullopt;
+}
+
+absl::optional<H264Level> H264SupportedLevel(int max_frame_pixel_count,
+ float max_fps) {
+ static const int kPixelsPerMacroblock = 16 * 16;
+
+ for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) {
+ const LevelConstraint& level_constraint = kLevelConstraints[i];
+ if (level_constraint.max_macroblock_frame_size * kPixelsPerMacroblock <=
+ max_frame_pixel_count &&
+ level_constraint.max_macroblocks_per_second <=
+ max_fps * level_constraint.max_macroblock_frame_size) {
+ return level_constraint.level;
+ }
+ }
+
+ // No level supported.
+ return absl::nullopt;
+}
+
+absl::optional<H264ProfileLevelId> ParseSdpForH264ProfileLevelId(
+ const SdpVideoFormat::Parameters& params) {
+ // TODO(magjed): The default should really be kProfileBaseline and kLevel1
+ // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In
+ // order to not break backwards compatibility with older versions of WebRTC
+ // where external codecs don't have any parameters, use
+ // kProfileConstrainedBaseline kLevel3_1 instead. This workaround will only be
+ // done in an interim period to allow external clients to update their code.
+ // http://crbug/webrtc/6337.
+ static const H264ProfileLevelId kDefaultProfileLevelId(
+ H264Profile::kProfileConstrainedBaseline, H264Level::kLevel3_1);
+
+ const auto profile_level_id_it = params.find(kProfileLevelId);
+ return (profile_level_id_it == params.end())
+ ? kDefaultProfileLevelId
+ : ParseH264ProfileLevelId(profile_level_id_it->second.c_str());
+}
+
+absl::optional<std::string> H264ProfileLevelIdToString(
+ const H264ProfileLevelId& profile_level_id) {
+ // Handle special case level == 1b.
+ if (profile_level_id.level == H264Level::kLevel1_b) {
+ switch (profile_level_id.profile) {
+ case H264Profile::kProfileConstrainedBaseline:
+ return {"42f00b"};
+ case H264Profile::kProfileBaseline:
+ return {"42100b"};
+ case H264Profile::kProfileMain:
+ return {"4d100b"};
+ // Level 1b is not allowed for other profiles.
+ default:
+ return absl::nullopt;
+ }
+ }
+
+ const char* profile_idc_iop_string;
+ switch (profile_level_id.profile) {
+ case H264Profile::kProfileConstrainedBaseline:
+ profile_idc_iop_string = "42e0";
+ break;
+ case H264Profile::kProfileBaseline:
+ profile_idc_iop_string = "4200";
+ break;
+ case H264Profile::kProfileMain:
+ profile_idc_iop_string = "4d00";
+ break;
+ case H264Profile::kProfileConstrainedHigh:
+ profile_idc_iop_string = "640c";
+ break;
+ case H264Profile::kProfileHigh:
+ profile_idc_iop_string = "6400";
+ break;
+ case H264Profile::kProfilePredictiveHigh444:
+ profile_idc_iop_string = "f400";
+ break;
+ // Unrecognized profile.
+ default:
+ return absl::nullopt;
+ }
+
+ char str[7];
+ snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level);
+ return {str};
+}
+
+bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2) {
+ const absl::optional<H264ProfileLevelId> profile_level_id =
+ ParseSdpForH264ProfileLevelId(params1);
+ const absl::optional<H264ProfileLevelId> other_profile_level_id =
+ ParseSdpForH264ProfileLevelId(params2);
+ // Compare H264 profiles, but not levels.
+ return profile_level_id && other_profile_level_id &&
+ profile_level_id->profile == other_profile_level_id->profile;
+}
+
+} // namespace webrtc
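To make the ByteMaskString/BitPattern matching above concrete, here is a worked example of the {0x42, BitPattern("x1xx0000")} row, re-stating the anonymous-namespace logic purely for illustration:

#include <cassert>
#include <cstdint>

void BitPatternWorkedExample() {
  // For "x1xx0000": ByteMaskString('x', ...) = 0b10110000, so
  // mask = ~0b10110000 = 0b01001111 and masked_value = 0b01000000.
  const uint8_t mask = static_cast<uint8_t>(~0b10110000);
  const uint8_t masked_value = 0b01000000;
  auto matches = [&](uint8_t profile_iop) {
    return (profile_iop & mask) == masked_value;
  };
  // constraint_set1 (bit 6) must be set and the low four bits must be zero;
  // bits 7, 5 and 4 are "don't care".
  assert(matches(0xE0));   // -> kProfileConstrainedBaseline for profile_idc 0x42.
  assert(!matches(0x41));  // Reserved low bit set -> no match.
  assert(!matches(0x00));  // Bit 6 clear -> falls through to the Baseline row.
}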
diff --git a/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.h b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.h
new file mode 100644
index 0000000000..4b46ad329d
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_
+#define API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+enum class H264Profile {
+ kProfileConstrainedBaseline,
+ kProfileBaseline,
+ kProfileMain,
+ kProfileConstrainedHigh,
+ kProfileHigh,
+ kProfilePredictiveHigh444,
+};
+
+// All values are equal to ten times the level number, except level 1b which is
+// special.
+enum class H264Level {
+ kLevel1_b = 0,
+ kLevel1 = 10,
+ kLevel1_1 = 11,
+ kLevel1_2 = 12,
+ kLevel1_3 = 13,
+ kLevel2 = 20,
+ kLevel2_1 = 21,
+ kLevel2_2 = 22,
+ kLevel3 = 30,
+ kLevel3_1 = 31,
+ kLevel3_2 = 32,
+ kLevel4 = 40,
+ kLevel4_1 = 41,
+ kLevel4_2 = 42,
+ kLevel5 = 50,
+ kLevel5_1 = 51,
+ kLevel5_2 = 52
+};
+
+struct H264ProfileLevelId {
+ constexpr H264ProfileLevelId(H264Profile profile, H264Level level)
+ : profile(profile), level(level) {}
+ H264Profile profile;
+ H264Level level;
+};
+
+// Parse profile level id that is represented as a string of 3 hex bytes.
+// Nothing will be returned if the string is not a recognized H264
+// profile level id.
+absl::optional<H264ProfileLevelId> ParseH264ProfileLevelId(const char* str);
+
+// Parse profile level id that is represented as a string of 3 hex bytes
+// contained in an SDP key-value map. A default profile level id will be
+// returned if the profile-level-id key is missing. Nothing will be returned if
+// the key is present but the string is invalid.
+RTC_EXPORT absl::optional<H264ProfileLevelId> ParseSdpForH264ProfileLevelId(
+ const SdpVideoFormat::Parameters& params);
+
+// Given that a decoder supports up to a given frame size (in pixels) at up to a
+// given number of frames per second, return the highest H.264 level where it
+// can guarantee that it will be able to support all valid encoded streams that
+// are within that level.
+RTC_EXPORT absl::optional<H264Level> H264SupportedLevel(
+ int max_frame_pixel_count,
+ float max_fps);
+
+// Returns canonical string representation as three hex bytes of the profile
+// level id, or returns nothing for invalid profile level ids.
+RTC_EXPORT absl::optional<std::string> H264ProfileLevelIdToString(
+ const H264ProfileLevelId& profile_level_id);
+
+// Returns true if the parameters have the same H264 profile (Baseline, High,
+// etc).
+RTC_EXPORT bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_
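A small usage sketch of the H.264 helpers declared above, showing the parse/serialize round trip and the interim default documented in h264_profile_level_id.cc:

#include <string>

#include "api/video_codecs/h264_profile_level_id.h"

void H264ProfileLevelIdExample() {
  // "42e01f": profile_idc 0x42, profile_iop 0xe0, level_idc 0x1f (31), i.e.
  // Constrained Baseline, level 3.1.
  absl::optional<webrtc::H264ProfileLevelId> id =
      webrtc::ParseH264ProfileLevelId("42e01f");
  // An empty parameter map yields the interim default: Constrained Baseline,
  // level 3.1 (see the TODO in the .cc file).
  absl::optional<webrtc::H264ProfileLevelId> from_sdp =
      webrtc::ParseSdpForH264ProfileLevelId({});
  // Serializing back produces the canonical three hex bytes, "42e01f".
  absl::optional<std::string> str = webrtc::H264ProfileLevelIdToString(*id);
  (void)from_sdp;
  (void)str;
}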
diff --git a/third_party/libwebrtc/api/video_codecs/rtc_software_fallback_wrappers_gn/moz.build b/third_party/libwebrtc/api/video_codecs/rtc_software_fallback_wrappers_gn/moz.build
new file mode 100644
index 0000000000..987a01bd2f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/rtc_software_fallback_wrappers_gn/moz.build
@@ -0,0 +1,234 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc",
+ "/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "GLESv2",
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("rtc_software_fallback_wrappers_gn")
diff --git a/third_party/libwebrtc/api/video_codecs/scalability_mode.cc b/third_party/libwebrtc/api/video_codecs/scalability_mode.cc
new file mode 100644
index 0000000000..c449b4217e
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/scalability_mode.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/scalability_mode.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode) {
+ switch (scalability_mode) {
+ case ScalabilityMode::kL1T1:
+ return "L1T1";
+ case ScalabilityMode::kL1T2:
+ return "L1T2";
+ case ScalabilityMode::kL1T3:
+ return "L1T3";
+ case ScalabilityMode::kL2T1:
+ return "L2T1";
+ case ScalabilityMode::kL2T1h:
+ return "L2T1h";
+ case ScalabilityMode::kL2T1_KEY:
+ return "L2T1_KEY";
+ case ScalabilityMode::kL2T2:
+ return "L2T2";
+ case ScalabilityMode::kL2T2h:
+ return "L2T2h";
+ case ScalabilityMode::kL2T2_KEY:
+ return "L2T2_KEY";
+ case ScalabilityMode::kL2T2_KEY_SHIFT:
+ return "L2T2_KEY_SHIFT";
+ case ScalabilityMode::kL2T3:
+ return "L2T3";
+ case ScalabilityMode::kL2T3h:
+ return "L2T3h";
+ case ScalabilityMode::kL2T3_KEY:
+ return "L2T3_KEY";
+ case ScalabilityMode::kL3T1:
+ return "L3T1";
+ case ScalabilityMode::kL3T1h:
+ return "L3T1h";
+ case ScalabilityMode::kL3T1_KEY:
+ return "L3T1_KEY";
+ case ScalabilityMode::kL3T2:
+ return "L3T2";
+ case ScalabilityMode::kL3T2h:
+ return "L3T2h";
+ case ScalabilityMode::kL3T2_KEY:
+ return "L3T2_KEY";
+ case ScalabilityMode::kL3T3:
+ return "L3T3";
+ case ScalabilityMode::kL3T3h:
+ return "L3T3h";
+ case ScalabilityMode::kL3T3_KEY:
+ return "L3T3_KEY";
+ case ScalabilityMode::kS2T1:
+ return "S2T1";
+ case ScalabilityMode::kS2T1h:
+ return "S2T1h";
+ case ScalabilityMode::kS2T2:
+ return "S2T2";
+ case ScalabilityMode::kS2T2h:
+ return "S2T2h";
+ case ScalabilityMode::kS2T3:
+ return "S2T3";
+ case ScalabilityMode::kS2T3h:
+ return "S2T3h";
+ case ScalabilityMode::kS3T1:
+ return "S3T1";
+ case ScalabilityMode::kS3T1h:
+ return "S3T1h";
+ case ScalabilityMode::kS3T2:
+ return "S3T2";
+ case ScalabilityMode::kS3T2h:
+ return "S3T2h";
+ case ScalabilityMode::kS3T3:
+ return "S3T3";
+ case ScalabilityMode::kS3T3h:
+ return "S3T3h";
+ }
+ RTC_CHECK_NOTREACHED();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/scalability_mode.h b/third_party/libwebrtc/api/video_codecs/scalability_mode.h
new file mode 100644
index 0000000000..b26f32eb22
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/scalability_mode.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_SCALABILITY_MODE_H_
+#define API_VIDEO_CODECS_SCALABILITY_MODE_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Supported scalability modes. Most applications should use the
+// PeerConnection-level APIs, where the scalability mode is represented as a
+// string. This list of currently recognized modes is intended for the API
+// boundary between WebRTC and injected encoders. Any application usage
+// outside of injected encoders is strongly discouraged.
+enum class ScalabilityMode : uint8_t {
+ kL1T1,
+ kL1T2,
+ kL1T3,
+ kL2T1,
+ kL2T1h,
+ kL2T1_KEY,
+ kL2T2,
+ kL2T2h,
+ kL2T2_KEY,
+ kL2T2_KEY_SHIFT,
+ kL2T3,
+ kL2T3h,
+ kL2T3_KEY,
+ kL3T1,
+ kL3T1h,
+ kL3T1_KEY,
+ kL3T2,
+ kL3T2h,
+ kL3T2_KEY,
+ kL3T3,
+ kL3T3h,
+ kL3T3_KEY,
+ kS2T1,
+ kS2T1h,
+ kS2T2,
+ kS2T2h,
+ kS2T3,
+ kS2T3h,
+ kS3T1,
+ kS3T1h,
+ kS3T2,
+ kS3T2h,
+ kS3T3,
+ kS3T3h,
+};
+
+inline constexpr ScalabilityMode kAllScalabilityModes[] = {
+ // clang-format off
+ ScalabilityMode::kL1T1,
+ ScalabilityMode::kL1T2,
+ ScalabilityMode::kL1T3,
+ ScalabilityMode::kL2T1,
+ ScalabilityMode::kL2T1h,
+ ScalabilityMode::kL2T1_KEY,
+ ScalabilityMode::kL2T2,
+ ScalabilityMode::kL2T2h,
+ ScalabilityMode::kL2T2_KEY,
+ ScalabilityMode::kL2T2_KEY_SHIFT,
+ ScalabilityMode::kL2T3,
+ ScalabilityMode::kL2T3h,
+ ScalabilityMode::kL2T3_KEY,
+ ScalabilityMode::kL3T1,
+ ScalabilityMode::kL3T1h,
+ ScalabilityMode::kL3T1_KEY,
+ ScalabilityMode::kL3T2,
+ ScalabilityMode::kL3T2h,
+ ScalabilityMode::kL3T2_KEY,
+ ScalabilityMode::kL3T3,
+ ScalabilityMode::kL3T3h,
+ ScalabilityMode::kL3T3_KEY,
+ ScalabilityMode::kS2T1,
+ ScalabilityMode::kS2T1h,
+ ScalabilityMode::kS2T2,
+ ScalabilityMode::kS2T2h,
+ ScalabilityMode::kS2T3,
+ ScalabilityMode::kS2T3h,
+ ScalabilityMode::kS3T1,
+ ScalabilityMode::kS3T1h,
+ ScalabilityMode::kS3T2,
+ ScalabilityMode::kS3T2h,
+ ScalabilityMode::kS3T3,
+ ScalabilityMode::kS3T3h,
+ // clang-format on
+};
+
+inline constexpr size_t kScalabilityModeCount =
+ sizeof(kAllScalabilityModes) / sizeof(ScalabilityMode);
+
+RTC_EXPORT
+absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_SCALABILITY_MODE_H_
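A short sketch relating the enum above to its string form by iterating the compile-time list of modes:

#include <string>
#include <vector>

#include "api/video_codecs/scalability_mode.h"

std::vector<std::string> AllScalabilityModeNames() {
  std::vector<std::string> names;
  names.reserve(webrtc::kScalabilityModeCount);
  for (webrtc::ScalabilityMode mode : webrtc::kAllScalabilityModes) {
    absl::string_view name = webrtc::ScalabilityModeToString(mode);
    names.emplace_back(name.data(), name.size());  // e.g. "L1T1", "L2T2_KEY".
  }
  return names;
}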
diff --git a/third_party/libwebrtc/api/video_codecs/scalability_mode_gn/moz.build b/third_party/libwebrtc/api/video_codecs/scalability_mode_gn/moz.build
new file mode 100644
index 0000000000..a308bfc58f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/scalability_mode_gn/moz.build
@@ -0,0 +1,221 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video_codecs/scalability_mode.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("scalability_mode_gn")
diff --git a/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.cc b/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.cc
new file mode 100644
index 0000000000..b4571632d9
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/scalability_mode_helper.h"
+
+#include "modules/video_coding/svc/scalability_mode_util.h"
+
+namespace webrtc {
+
+absl::optional<int> ScalabilityModeStringToNumSpatialLayers(
+ absl::string_view scalability_mode_string) {
+ absl::optional<ScalabilityMode> scalability_mode =
+ ScalabilityModeFromString(scalability_mode_string);
+ if (!scalability_mode.has_value()) {
+ return absl::nullopt;
+ }
+ return ScalabilityModeToNumSpatialLayers(*scalability_mode);
+}
+
+absl::optional<int> ScalabilityModeStringToNumTemporalLayers(
+ absl::string_view scalability_mode_string) {
+ absl::optional<ScalabilityMode> scalability_mode =
+ ScalabilityModeFromString(scalability_mode_string);
+ if (!scalability_mode.has_value()) {
+ return absl::nullopt;
+ }
+ return ScalabilityModeToNumTemporalLayers(*scalability_mode);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.h b/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.h
new file mode 100644
index 0000000000..a8b060d079
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/scalability_mode_helper.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_
+#define API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// Returns the number of spatial layers from the `scalability_mode_string`
+// or nullopt if the given mode is unknown.
+absl::optional<int> ScalabilityModeStringToNumSpatialLayers(
+ absl::string_view scalability_mode_string);
+
+// Returns the number of temporal layers from the `scalability_mode_string`
+// or nullopt if the given mode is unknown.
+absl::optional<int> ScalabilityModeStringToNumTemporalLayers(
+ absl::string_view scalability_mode_string);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_
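A minimal usage sketch of the helper declared above, assuming the header is on the include path and the code links against the WebRTC target that provides it (the function and file names in the sketch are illustrative, not part of the patch):

#include <iostream>

#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/video_codecs/scalability_mode_helper.h"

// Prints the layer counts implied by an SDP scalability mode string such as
// "L1T3"; unknown modes yield nullopt, reported here as a single layer.
void PrintLayerCounts(absl::string_view mode) {
  absl::optional<int> spatial =
      webrtc::ScalabilityModeStringToNumSpatialLayers(mode);
  absl::optional<int> temporal =
      webrtc::ScalabilityModeStringToNumTemporalLayers(mode);
  std::cout << mode << ": " << spatial.value_or(1) << " spatial, "
            << temporal.value_or(1) << " temporal layer(s)\n";
}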
diff --git a/third_party/libwebrtc/api/video_codecs/sdp_video_format.cc b/third_party/libwebrtc/api/video_codecs/sdp_video_format.cc
new file mode 100644
index 0000000000..cb7e98a682
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/sdp_video_format.cc
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/sdp_video_format.h"
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/video_codecs/av1_profile.h"
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/vp9_profile.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+namespace {
+
+std::string H264GetPacketizationModeOrDefault(
+ const SdpVideoFormat::Parameters& params) {
+ constexpr char kH264FmtpPacketizationMode[] = "packetization-mode";
+ const auto it = params.find(kH264FmtpPacketizationMode);
+ if (it != params.end()) {
+ return it->second;
+ }
+ // If packetization-mode is not present, default to "0".
+ // https://tools.ietf.org/html/rfc6184#section-6.2
+ return "0";
+}
+
+bool H264IsSamePacketizationMode(const SdpVideoFormat::Parameters& left,
+ const SdpVideoFormat::Parameters& right) {
+ return H264GetPacketizationModeOrDefault(left) ==
+ H264GetPacketizationModeOrDefault(right);
+}
+
+// Some (video) codecs are actually families of codecs and rely on parameters
+// to distinguish different incompatible family members.
+bool IsSameCodecSpecific(const SdpVideoFormat& format1,
+ const SdpVideoFormat& format2) {
+ // The assumption when calling this function is that the two formats have the
+ // same name.
+ RTC_DCHECK(absl::EqualsIgnoreCase(format1.name, format2.name));
+
+ VideoCodecType codec_type = PayloadStringToCodecType(format1.name);
+ switch (codec_type) {
+ case kVideoCodecH264:
+ return H264IsSameProfile(format1.parameters, format2.parameters) &&
+ H264IsSamePacketizationMode(format1.parameters,
+ format2.parameters);
+ case kVideoCodecVP9:
+ return VP9IsSameProfile(format1.parameters, format2.parameters);
+ case kVideoCodecAV1:
+ return AV1IsSameProfile(format1.parameters, format2.parameters);
+ default:
+ return true;
+ }
+}
+} // namespace
+
+SdpVideoFormat::SdpVideoFormat(const std::string& name) : name(name) {}
+
+SdpVideoFormat::SdpVideoFormat(const std::string& name,
+ const Parameters& parameters)
+ : name(name), parameters(parameters) {}
+
+SdpVideoFormat::SdpVideoFormat(
+ const std::string& name,
+ const Parameters& parameters,
+ const absl::InlinedVector<ScalabilityMode, kScalabilityModeCount>&
+ scalability_modes)
+ : name(name),
+ parameters(parameters),
+ scalability_modes(scalability_modes) {}
+
+SdpVideoFormat::SdpVideoFormat(const SdpVideoFormat&) = default;
+SdpVideoFormat::SdpVideoFormat(SdpVideoFormat&&) = default;
+SdpVideoFormat& SdpVideoFormat::operator=(const SdpVideoFormat&) = default;
+SdpVideoFormat& SdpVideoFormat::operator=(SdpVideoFormat&&) = default;
+
+SdpVideoFormat::~SdpVideoFormat() = default;
+
+std::string SdpVideoFormat::ToString() const {
+ rtc::StringBuilder builder;
+ builder << "Codec name: " << name << ", parameters: {";
+ for (const auto& kv : parameters) {
+ builder << " " << kv.first << "=" << kv.second;
+ }
+
+ builder << " }";
+ if (!scalability_modes.empty()) {
+ builder << ", scalability_modes: [";
+ bool first = true;
+ for (const auto scalability_mode : scalability_modes) {
+ if (first) {
+ first = false;
+ } else {
+ builder << ", ";
+ }
+ builder << ScalabilityModeToString(scalability_mode);
+ }
+ builder << "]";
+ }
+
+ return builder.str();
+}
+
+bool SdpVideoFormat::IsSameCodec(const SdpVideoFormat& other) const {
+ // Two codecs are considered the same if the name matches (case insensitive)
+ // and certain codec-specific parameters match.
+ return absl::EqualsIgnoreCase(name, other.name) &&
+ IsSameCodecSpecific(*this, other);
+}
+
+bool SdpVideoFormat::IsCodecInList(
+ rtc::ArrayView<const webrtc::SdpVideoFormat> formats) const {
+ for (const auto& format : formats) {
+ if (IsSameCodec(format)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) {
+ return a.name == b.name && a.parameters == b.parameters &&
+ a.scalability_modes == b.scalability_modes;
+}
+
+absl::optional<SdpVideoFormat> FuzzyMatchSdpVideoFormat(
+ rtc::ArrayView<const SdpVideoFormat> supported_formats,
+ const SdpVideoFormat& format) {
+ absl::optional<SdpVideoFormat> res;
+ int best_parameter_match = 0;
+ for (const auto& supported_format : supported_formats) {
+ if (absl::EqualsIgnoreCase(supported_format.name, format.name)) {
+ int matching_parameters = 0;
+ for (const auto& kv : supported_format.parameters) {
+ auto it = format.parameters.find(kv.first);
+ if (it != format.parameters.end() && it->second == kv.second) {
+ matching_parameters += 1;
+ }
+ }
+
+ if (!res || matching_parameters > best_parameter_match) {
+ res = supported_format;
+ best_parameter_match = matching_parameters;
+ }
+ }
+ }
+
+ if (!res) {
+ RTC_LOG(LS_INFO) << "Failed to match SdpVideoFormat " << format.ToString();
+ } else if (*res != format) {
+ RTC_LOG(LS_INFO) << "Matched SdpVideoFormat " << format.ToString()
+ << " with " << res->ToString();
+ }
+
+ return res;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/sdp_video_format.h b/third_party/libwebrtc/api/video_codecs/sdp_video_format.h
new file mode 100644
index 0000000000..faaa66c241
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/sdp_video_format.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_
+#define API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_
+
+#include <map>
+#include <string>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/array_view.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// SDP specification for a single video codec.
+// NOTE: This class is still under development and may change without notice.
+struct RTC_EXPORT SdpVideoFormat {
+ using Parameters = std::map<std::string, std::string>;
+
+ explicit SdpVideoFormat(const std::string& name);
+ SdpVideoFormat(const std::string& name, const Parameters& parameters);
+ SdpVideoFormat(
+ const std::string& name,
+ const Parameters& parameters,
+ const absl::InlinedVector<ScalabilityMode, kScalabilityModeCount>&
+ scalability_modes);
+ SdpVideoFormat(const SdpVideoFormat&);
+ SdpVideoFormat(SdpVideoFormat&&);
+ SdpVideoFormat& operator=(const SdpVideoFormat&);
+ SdpVideoFormat& operator=(SdpVideoFormat&&);
+
+ ~SdpVideoFormat();
+
+ // Returns true if the SdpVideoFormats have the same names as well as codec
+ // specific parameters. Please note that two SdpVideoFormats can represent the
+ // same codec even though not all parameters are the same.
+ bool IsSameCodec(const SdpVideoFormat& other) const;
+ bool IsCodecInList(
+ rtc::ArrayView<const webrtc::SdpVideoFormat> formats) const;
+
+ std::string ToString() const;
+
+ friend RTC_EXPORT bool operator==(const SdpVideoFormat& a,
+ const SdpVideoFormat& b);
+ friend RTC_EXPORT bool operator!=(const SdpVideoFormat& a,
+ const SdpVideoFormat& b) {
+ return !(a == b);
+ }
+
+ std::string name;
+ Parameters parameters;
+ absl::InlinedVector<ScalabilityMode, kScalabilityModeCount> scalability_modes;
+};
+
+// For not-so-good reasons, additional parameters are sometimes added to an
+// SdpVideoFormat, which makes instances that should compare equal fail to
+// match. Until we stop misusing SdpVideoFormat this way, this convenience
+// function performs fuzzy matching.
+absl::optional<SdpVideoFormat> FuzzyMatchSdpVideoFormat(
+ rtc::ArrayView<const SdpVideoFormat> supported_formats,
+ const SdpVideoFormat& format);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_
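FuzzyMatchSdpVideoFormat, declared above, returns the supported entry whose name matches case-insensitively and whose parameters overlap the requested format the most. A rough usage sketch under the same include-path assumption (the wrapper function is illustrative only):

#include <vector>

#include "absl/types/optional.h"
#include "api/video_codecs/sdp_video_format.h"

absl::optional<webrtc::SdpVideoFormat> PickSupportedVp9(
    const webrtc::SdpVideoFormat& requested) {
  using Params = webrtc::SdpVideoFormat::Parameters;
  const std::vector<webrtc::SdpVideoFormat> supported = {
      webrtc::SdpVideoFormat("VP9", Params{{"profile-id", "0"}}),
      webrtc::SdpVideoFormat("VP9", Params{{"profile-id", "2"}}),
  };
  // Returns the entry with the most exactly-matching parameters, the first
  // name match when no parameters line up, or nullopt if no name matches.
  return webrtc::FuzzyMatchSdpVideoFormat(supported, requested);
}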
diff --git a/third_party/libwebrtc/api/video_codecs/simulcast_stream.cc b/third_party/libwebrtc/api/video_codecs/simulcast_stream.cc
new file mode 100644
index 0000000000..312429ef9f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/simulcast_stream.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/simulcast_stream.h"
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+unsigned char SimulcastStream::GetNumberOfTemporalLayers() const {
+ return numberOfTemporalLayers;
+}
+void SimulcastStream::SetNumberOfTemporalLayers(unsigned char n) {
+ RTC_DCHECK_GE(n, 1);
+ RTC_DCHECK_LE(n, 3);
+ numberOfTemporalLayers = n;
+}
+
+ScalabilityMode SimulcastStream::GetScalabilityMode() const {
+ RTC_CHECK_GE(numberOfTemporalLayers, 1);
+ RTC_CHECK_LE(numberOfTemporalLayers, 3);
+ static const ScalabilityMode scalability_modes[3] = {
+ ScalabilityMode::kL1T1,
+ ScalabilityMode::kL1T2,
+ ScalabilityMode::kL1T3,
+ };
+ return scalability_modes[numberOfTemporalLayers - 1];
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/simulcast_stream.h b/third_party/libwebrtc/api/video_codecs/simulcast_stream.h
new file mode 100644
index 0000000000..7c0dd5d786
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/simulcast_stream.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_SIMULCAST_STREAM_H_
+#define API_VIDEO_CODECS_SIMULCAST_STREAM_H_
+
+#include "api/video_codecs/scalability_mode.h"
+
+namespace webrtc {
+
+// TODO(bugs.webrtc.org/6883): Unify with struct VideoStream, part of
+// VideoEncoderConfig.
+struct SimulcastStream {
+ // Temporary utility methods for transition from numberOfTemporalLayers
+ // setting to ScalabilityMode.
+ unsigned char GetNumberOfTemporalLayers() const;
+ ScalabilityMode GetScalabilityMode() const;
+ void SetNumberOfTemporalLayers(unsigned char n);
+
+ int width = 0;
+ int height = 0;
+ float maxFramerate = 0; // fps.
+ unsigned char numberOfTemporalLayers = 1;
+ unsigned int maxBitrate = 0; // kilobits/sec.
+ unsigned int targetBitrate = 0; // kilobits/sec.
+ unsigned int minBitrate = 0; // kilobits/sec.
+ unsigned int qpMax = 0; // minimum quality
+ bool active = false; // encoded and sent.
+};
+
+} // namespace webrtc
+#endif // API_VIDEO_CODECS_SIMULCAST_STREAM_H_
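A small sketch of the temporal-layer helpers defined above: SetNumberOfTemporalLayers accepts 1 to 3 layers, and GetScalabilityMode maps them onto the single-spatial-layer modes L1T1 through L1T3 (include paths assumed as elsewhere; the function name is illustrative):

#include "api/video_codecs/simulcast_stream.h"
#include "rtc_base/checks.h"

void ConfigureSimulcastStream() {
  webrtc::SimulcastStream stream;
  stream.width = 640;
  stream.height = 360;
  stream.maxFramerate = 30;
  stream.SetNumberOfTemporalLayers(3);  // Valid range is 1..3.
  // Three temporal layers on a single spatial layer correspond to L1T3.
  RTC_CHECK(stream.GetScalabilityMode() == webrtc::ScalabilityMode::kL1T3);
}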
diff --git a/third_party/libwebrtc/api/video_codecs/spatial_layer.cc b/third_party/libwebrtc/api/video_codecs/spatial_layer.cc
new file mode 100644
index 0000000000..25ccdfeb48
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/spatial_layer.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/spatial_layer.h"
+
+namespace webrtc {
+
+bool SpatialLayer::operator==(const SpatialLayer& other) const {
+ return (width == other.width && height == other.height &&
+ maxFramerate == other.maxFramerate &&
+ numberOfTemporalLayers == other.numberOfTemporalLayers &&
+ maxBitrate == other.maxBitrate &&
+ targetBitrate == other.targetBitrate &&
+ minBitrate == other.minBitrate && qpMax == other.qpMax &&
+ active == other.active);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/spatial_layer.h b/third_party/libwebrtc/api/video_codecs/spatial_layer.h
new file mode 100644
index 0000000000..5a1b425427
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/spatial_layer.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_SPATIAL_LAYER_H_
+#define API_VIDEO_CODECS_SPATIAL_LAYER_H_
+
+namespace webrtc {
+
+struct SpatialLayer {
+ bool operator==(const SpatialLayer& other) const;
+ bool operator!=(const SpatialLayer& other) const { return !(*this == other); }
+
+ unsigned short width; // NOLINT(runtime/int)
+ unsigned short height; // NOLINT(runtime/int)
+ float maxFramerate; // fps.
+ unsigned char numberOfTemporalLayers;
+ unsigned int maxBitrate; // kilobits/sec.
+ unsigned int targetBitrate; // kilobits/sec.
+ unsigned int minBitrate; // kilobits/sec.
+ unsigned int qpMax; // minimum quality
+ bool active; // encoded and sent.
+};
+
+} // namespace webrtc
+#endif // API_VIDEO_CODECS_SPATIAL_LAYER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/test/BUILD.gn b/third_party/libwebrtc/api/video_codecs/test/BUILD.gn
new file mode 100644
index 0000000000..47d5ff9aa3
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/BUILD.gn
@@ -0,0 +1,81 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../../webrtc.gni")
+
+if (rtc_include_tests) {
+ rtc_library("video_codecs_api_unittests") {
+ testonly = true
+ sources = [
+ "builtin_video_encoder_factory_unittest.cc",
+ "h264_profile_level_id_unittest.cc",
+ "sdp_video_format_unittest.cc",
+ "video_decoder_software_fallback_wrapper_unittest.cc",
+ "video_encoder_software_fallback_wrapper_unittest.cc",
+ ]
+
+ deps = [
+ ":video_decoder_factory_template_tests",
+ ":video_encoder_factory_template_tests",
+ "..:builtin_video_encoder_factory",
+ "..:rtc_software_fallback_wrappers",
+ "..:video_codecs_api",
+ "../..:fec_controller_api",
+ "../..:mock_video_encoder",
+ "../../../api:scoped_refptr",
+ "../../../media:media_constants",
+ "../../../media:rtc_media_base",
+ "../../../modules/video_coding:video_codec_interface",
+ "../../../modules/video_coding:video_coding_utility",
+ "../../../modules/video_coding:webrtc_vp8",
+ "../../../rtc_base:checks",
+ "../../../rtc_base:rtc_base_tests_utils",
+ "../../../test:field_trial",
+ "../../../test:test_support",
+ "../../../test:video_test_common",
+ "../../video:encoded_image",
+ "../../video:video_bitrate_allocation",
+ "../../video:video_frame",
+ "../../video:video_rtp_headers",
+ "//testing/gtest",
+ ]
+ absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
+ }
+
+ rtc_library("video_encoder_factory_template_tests") {
+ testonly = true
+ sources = [ "video_encoder_factory_template_tests.cc" ]
+
+ deps = [
+ "..:video_encoder_factory_template",
+ "..:video_encoder_factory_template_libaom_av1_adapter",
+ "..:video_encoder_factory_template_libvpx_vp8_adapter",
+ "..:video_encoder_factory_template_libvpx_vp9_adapter",
+ "..:video_encoder_factory_template_open_h264_adapter",
+ "../../:mock_video_encoder",
+ "../../../test:test_support",
+ "//testing/gtest",
+ ]
+ }
+
+ rtc_library("video_decoder_factory_template_tests") {
+ testonly = true
+ sources = [ "video_decoder_factory_template_tests.cc" ]
+
+ deps = [
+ "..:video_decoder_factory_template",
+ "..:video_decoder_factory_template_dav1d_adapter",
+ "..:video_decoder_factory_template_libvpx_vp8_adapter",
+ "..:video_decoder_factory_template_libvpx_vp9_adapter",
+ "..:video_decoder_factory_template_open_h264_adapter",
+ "../../:mock_video_decoder",
+ "../../../test:test_support",
+ "//testing/gtest",
+ ]
+ }
+}
diff --git a/third_party/libwebrtc/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc b/third_party/libwebrtc/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc
new file mode 100644
index 0000000000..84fd594b4c
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(BuiltinVideoEncoderFactoryTest, AnnouncesVp9AccordingToBuildFlags) {
+ std::unique_ptr<VideoEncoderFactory> factory =
+ CreateBuiltinVideoEncoderFactory();
+ bool claims_vp9_support = false;
+ for (const SdpVideoFormat& format : factory->GetSupportedFormats()) {
+ if (format.name == "VP9") {
+ claims_vp9_support = true;
+ break;
+ }
+ }
+#if defined(RTC_ENABLE_VP9)
+ EXPECT_TRUE(claims_vp9_support);
+#else
+ EXPECT_FALSE(claims_vp9_support);
+#endif // defined(RTC_ENABLE_VP9)
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/h264_profile_level_id_unittest.cc b/third_party/libwebrtc/api/video_codecs/test/h264_profile_level_id_unittest.cc
new file mode 100644
index 0000000000..47098d2682
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/h264_profile_level_id_unittest.cc
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/h264_profile_level_id.h"
+
+#include <map>
+#include <string>
+
+#include "absl/types/optional.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(H264ProfileLevelId, TestParsingInvalid) {
+ // Malformed strings.
+ EXPECT_FALSE(ParseH264ProfileLevelId(""));
+ EXPECT_FALSE(ParseH264ProfileLevelId(" 42e01f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("4242e01f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("e01f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("gggggg"));
+
+ // Invalid level.
+ EXPECT_FALSE(ParseH264ProfileLevelId("42e000"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("42e00f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("42e0ff"));
+
+ // Invalid profile.
+ EXPECT_FALSE(ParseH264ProfileLevelId("42e11f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("58601f"));
+ EXPECT_FALSE(ParseH264ProfileLevelId("64e01f"));
+}
+
+TEST(H264ProfileLevelId, TestParsingLevel) {
+ EXPECT_EQ(H264Level::kLevel3_1, ParseH264ProfileLevelId("42e01f")->level);
+ EXPECT_EQ(H264Level::kLevel1_1, ParseH264ProfileLevelId("42e00b")->level);
+ EXPECT_EQ(H264Level::kLevel1_b, ParseH264ProfileLevelId("42f00b")->level);
+ EXPECT_EQ(H264Level::kLevel4_2, ParseH264ProfileLevelId("42C02A")->level);
+ EXPECT_EQ(H264Level::kLevel5_2, ParseH264ProfileLevelId("640c34")->level);
+}
+
+TEST(H264ProfileLevelId, TestParsingConstrainedBaseline) {
+ EXPECT_EQ(H264Profile::kProfileConstrainedBaseline,
+ ParseH264ProfileLevelId("42e01f")->profile);
+ EXPECT_EQ(H264Profile::kProfileConstrainedBaseline,
+ ParseH264ProfileLevelId("42C02A")->profile);
+ EXPECT_EQ(H264Profile::kProfileConstrainedBaseline,
+ ParseH264ProfileLevelId("4de01f")->profile);
+ EXPECT_EQ(H264Profile::kProfileConstrainedBaseline,
+ ParseH264ProfileLevelId("58f01f")->profile);
+}
+
+TEST(H264ProfileLevelId, TestParsingBaseline) {
+ EXPECT_EQ(H264Profile::kProfileBaseline,
+ ParseH264ProfileLevelId("42a01f")->profile);
+ EXPECT_EQ(H264Profile::kProfileBaseline,
+ ParseH264ProfileLevelId("58A01F")->profile);
+}
+
+TEST(H264ProfileLevelId, TestParsingMain) {
+ EXPECT_EQ(H264Profile::kProfileMain,
+ ParseH264ProfileLevelId("4D401f")->profile);
+}
+
+TEST(H264ProfileLevelId, TestParsingHigh) {
+ EXPECT_EQ(H264Profile::kProfileHigh,
+ ParseH264ProfileLevelId("64001f")->profile);
+}
+
+TEST(H264ProfileLevelId, TestParsingConstrainedHigh) {
+ EXPECT_EQ(H264Profile::kProfileConstrainedHigh,
+ ParseH264ProfileLevelId("640c1f")->profile);
+}
+
+TEST(H264ProfileLevelId, TestSupportedLevel) {
+ EXPECT_EQ(H264Level::kLevel2_1, *H264SupportedLevel(640 * 480, 25));
+ EXPECT_EQ(H264Level::kLevel3_1, *H264SupportedLevel(1280 * 720, 30));
+ EXPECT_EQ(H264Level::kLevel4_2, *H264SupportedLevel(1920 * 1280, 60));
+}
+
+// Test supported level below level 1 requirements.
+TEST(H264ProfileLevelId, TestSupportedLevelInvalid) {
+ EXPECT_FALSE(H264SupportedLevel(0, 0));
+ // All levels support fps > 5.
+ EXPECT_FALSE(H264SupportedLevel(1280 * 720, 5));
+ // All levels support frame sizes > 183 * 137.
+ EXPECT_FALSE(H264SupportedLevel(183 * 137, 30));
+}
+
+TEST(H264ProfileLevelId, TestToString) {
+ EXPECT_EQ("42e01f", *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileConstrainedBaseline,
+ H264Level::kLevel3_1)));
+ EXPECT_EQ("42000a", *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileBaseline, H264Level::kLevel1)));
+ EXPECT_EQ("4d001f", H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileMain, H264Level::kLevel3_1)));
+ EXPECT_EQ("640c2a",
+ *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileConstrainedHigh, H264Level::kLevel4_2)));
+ EXPECT_EQ("64002a", *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileHigh, H264Level::kLevel4_2)));
+}
+
+TEST(H264ProfileLevelId, TestToStringLevel1b) {
+ EXPECT_EQ("42f00b", *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileConstrainedBaseline,
+ H264Level::kLevel1_b)));
+ EXPECT_EQ("42100b",
+ *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileBaseline, H264Level::kLevel1_b)));
+ EXPECT_EQ("4d100b", *H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileMain, H264Level::kLevel1_b)));
+}
+
+TEST(H264ProfileLevelId, TestToStringRoundTrip) {
+ EXPECT_EQ("42e01f",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("42e01f")));
+ EXPECT_EQ("42e01f",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("42E01F")));
+ EXPECT_EQ("4d100b",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("4d100b")));
+ EXPECT_EQ("4d100b",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("4D100B")));
+ EXPECT_EQ("640c2a",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("640c2a")));
+ EXPECT_EQ("640c2a",
+ *H264ProfileLevelIdToString(*ParseH264ProfileLevelId("640C2A")));
+}
+
+TEST(H264ProfileLevelId, TestToStringInvalid) {
+ EXPECT_FALSE(H264ProfileLevelIdToString(
+ H264ProfileLevelId(H264Profile::kProfileHigh, H264Level::kLevel1_b)));
+ EXPECT_FALSE(H264ProfileLevelIdToString(H264ProfileLevelId(
+ H264Profile::kProfileConstrainedHigh, H264Level::kLevel1_b)));
+ EXPECT_FALSE(H264ProfileLevelIdToString(
+ H264ProfileLevelId(static_cast<H264Profile>(255), H264Level::kLevel3_1)));
+}
+
+TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) {
+ const absl::optional<H264ProfileLevelId> profile_level_id =
+ ParseSdpForH264ProfileLevelId(SdpVideoFormat::Parameters());
+ EXPECT_TRUE(profile_level_id);
+ EXPECT_EQ(H264Profile::kProfileConstrainedBaseline,
+ profile_level_id->profile);
+ EXPECT_EQ(H264Level::kLevel3_1, profile_level_id->level);
+}
+
+TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) {
+ SdpVideoFormat::Parameters params;
+ params["profile-level-id"] = "640c2a";
+ const absl::optional<H264ProfileLevelId> profile_level_id =
+ ParseSdpForH264ProfileLevelId(params);
+ EXPECT_TRUE(profile_level_id);
+ EXPECT_EQ(H264Profile::kProfileConstrainedHigh, profile_level_id->profile);
+ EXPECT_EQ(H264Level::kLevel4_2, profile_level_id->level);
+}
+
+TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdInvalid) {
+ SdpVideoFormat::Parameters params;
+ params["profile-level-id"] = "foobar";
+ EXPECT_FALSE(ParseSdpForH264ProfileLevelId(params));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/sdp_video_format_unittest.cc b/third_party/libwebrtc/api/video_codecs/test/sdp_video_format_unittest.cc
new file mode 100644
index 0000000000..bb158aeb95
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/sdp_video_format_unittest.cc
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/sdp_video_format.h"
+
+#include <stdint.h>
+
+#include "media/base/media_constants.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+typedef SdpVideoFormat Sdp;
+typedef SdpVideoFormat::Parameters Params;
+
+TEST(SdpVideoFormatTest, SameCodecNameNoParameters) {
+ EXPECT_TRUE(Sdp("H264").IsSameCodec(Sdp("h264")));
+ EXPECT_TRUE(Sdp("VP8").IsSameCodec(Sdp("vp8")));
+ EXPECT_TRUE(Sdp("VP9").IsSameCodec(Sdp("vp9")));
+ EXPECT_TRUE(Sdp("AV1").IsSameCodec(Sdp("Av1")));
+}
+
+TEST(SdpVideoFormatTest, DifferentCodecNameNoParameters) {
+ EXPECT_FALSE(Sdp("H264").IsSameCodec(Sdp("VP8")));
+ EXPECT_FALSE(Sdp("VP8").IsSameCodec(Sdp("VP9")));
+ EXPECT_FALSE(Sdp("AV1").IsSameCodec(Sdp("VP8")));
+}
+
+TEST(SdpVideoFormatTest, SameCodecNameSameParameters) {
+ EXPECT_TRUE(Sdp("VP9").IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_TRUE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_TRUE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "2"}})));
+ EXPECT_TRUE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "42e01f"}})));
+ EXPECT_TRUE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "640c34"}})));
+ EXPECT_TRUE(Sdp("AV1").IsSameCodec(Sdp("AV1", Params{{"profile", "0"}})));
+ EXPECT_TRUE(Sdp("AV1", Params{{"profile", "0"}})
+ .IsSameCodec(Sdp("AV1", Params{{"profile", "0"}})));
+ EXPECT_TRUE(Sdp("AV1", Params{{"profile", "2"}})
+ .IsSameCodec(Sdp("AV1", Params{{"profile", "2"}})));
+}
+
+TEST(SdpVideoFormatTest, SameCodecNameDifferentParameters) {
+ EXPECT_FALSE(Sdp("VP9").IsSameCodec(Sdp("VP9", Params{{"profile-id", "2"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "1"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-id", "0"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "640c34"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-level-id", "42f00b"}})));
+ EXPECT_FALSE(Sdp("AV1").IsSameCodec(Sdp("AV1", Params{{"profile", "1"}})));
+ EXPECT_FALSE(Sdp("AV1", Params{{"profile", "0"}})
+ .IsSameCodec(Sdp("AV1", Params{{"profile", "1"}})));
+ EXPECT_FALSE(Sdp("AV1", Params{{"profile", "1"}})
+ .IsSameCodec(Sdp("AV1", Params{{"profile", "2"}})));
+}
+
+TEST(SdpVideoFormatTest, DifferentCodecNameSameParameters) {
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "0"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile-id", "0"}})));
+ EXPECT_FALSE(Sdp("VP9", Params{{"profile-id", "2"}})
+ .IsSameCodec(Sdp("VP8", Params{{"profile-id", "2"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "42e01f"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile-level-id", "42e01f"}})));
+ EXPECT_FALSE(
+ Sdp("H264", Params{{"profile-level-id", "640c34"}})
+ .IsSameCodec(Sdp("VP8", Params{{"profile-level-id", "640c34"}})));
+ EXPECT_FALSE(Sdp("AV1", Params{{"profile", "0"}})
+ .IsSameCodec(Sdp("H264", Params{{"profile", "0"}})));
+ EXPECT_FALSE(Sdp("AV1", Params{{"profile", "2"}})
+ .IsSameCodec(Sdp("VP9", Params{{"profile", "2"}})));
+}
+
+TEST(SdpVideoFormatTest, H264PacketizationMode) {
+ // The default packetization mode is 0.
+ EXPECT_TRUE(Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "0"}})
+ .IsSameCodec(Sdp("H264")));
+ EXPECT_FALSE(Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}})
+ .IsSameCodec(Sdp("H264")));
+
+ EXPECT_TRUE(
+ Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}})
+ .IsSameCodec(
+ Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}})));
+}
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/video_decoder_factory_template_tests.cc b/third_party/libwebrtc/api/video_codecs/test/video_decoder_factory_template_tests.cc
new file mode 100644
index 0000000000..e9d7052501
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/video_decoder_factory_template_tests.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/test/mock_video_decoder.h"
+#include "api/video_codecs/video_decoder_factory_template.h"
+#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::Eq;
+using ::testing::Field;
+using ::testing::IsEmpty;
+using ::testing::Ne;
+using ::testing::Not;
+using ::testing::UnorderedElementsAre;
+
+namespace webrtc {
+namespace {
+const SdpVideoFormat kFooSdp("Foo");
+const SdpVideoFormat kBarLowSdp("Bar", {{"profile", "low"}});
+const SdpVideoFormat kBarHighSdp("Bar", {{"profile", "high"}});
+
+struct FooDecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() { return {kFooSdp}; }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+ auto decoder = std::make_unique<testing::StrictMock<MockVideoDecoder>>();
+ EXPECT_CALL(*decoder, Destruct);
+ return decoder;
+ }
+};
+
+struct BarDecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return {kBarLowSdp, kBarHighSdp};
+ }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+ auto decoder = std::make_unique<testing::StrictMock<MockVideoDecoder>>();
+ EXPECT_CALL(*decoder, Destruct);
+ return decoder;
+ }
+};
+
+TEST(VideoDecoderFactoryTemplate, OneTemplateAdapterCreateDecoder) {
+ VideoDecoderFactoryTemplate<FooDecoderTemplateAdapter> factory;
+ EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp));
+ EXPECT_THAT(factory.CreateVideoDecoder(kFooSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("FooX")), Eq(nullptr));
+}
+
+TEST(VideoDecoderFactoryTemplate, TwoTemplateAdaptersNoDuplicates) {
+ VideoDecoderFactoryTemplate<FooDecoderTemplateAdapter,
+ FooDecoderTemplateAdapter>
+ factory;
+ EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp));
+}
+
+TEST(VideoDecoderFactoryTemplate, TwoTemplateAdaptersCreateDecoders) {
+ VideoDecoderFactoryTemplate<FooDecoderTemplateAdapter,
+ BarDecoderTemplateAdapter>
+ factory;
+ EXPECT_THAT(factory.GetSupportedFormats(),
+ UnorderedElementsAre(kFooSdp, kBarLowSdp, kBarHighSdp));
+ EXPECT_THAT(factory.CreateVideoDecoder(kFooSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoDecoder(kBarLowSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoDecoder(kBarHighSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("FooX")), Eq(nullptr));
+ EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("Bar")), Eq(nullptr));
+}
+
+TEST(VideoDecoderFactoryTemplate, LibvpxVp8) {
+ VideoDecoderFactoryTemplate<LibvpxVp8DecoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats.size(), 1);
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "VP8"));
+ EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr));
+}
+
+TEST(VideoDecoderFactoryTemplate, LibvpxVp9) {
+ VideoDecoderFactoryTemplate<LibvpxVp9DecoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats, Not(IsEmpty()));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "VP9")));
+ EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr));
+}
+
+// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build
+// target remove this #ifdef.
+#if defined(WEBRTC_USE_H264)
+TEST(VideoDecoderFactoryTemplate, OpenH264) {
+ VideoDecoderFactoryTemplate<OpenH264DecoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats, Not(IsEmpty()));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "H264")));
+ EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr));
+}
+#endif // defined(WEBRTC_USE_H264)
+
+TEST(VideoDecoderFactoryTemplate, Dav1d) {
+ VideoDecoderFactoryTemplate<Dav1dDecoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats.size(), 1);
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "AV1"));
+ EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc b/third_party/libwebrtc/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc
new file mode 100644
index 0000000000..73dedc8395
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc
@@ -0,0 +1,305 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_decoder_software_fallback_wrapper.h"
+
+#include <stdint.h>
+
+#include "absl/types/optional.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "test/field_trial.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test {
+ protected:
+ VideoDecoderSoftwareFallbackWrapperTest()
+ : VideoDecoderSoftwareFallbackWrapperTest("") {}
+ explicit VideoDecoderSoftwareFallbackWrapperTest(
+ const std::string& field_trials)
+ : override_field_trials_(field_trials),
+ fake_decoder_(new CountingFakeDecoder()),
+ fallback_wrapper_(CreateVideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder>(VP8Decoder::Create()),
+ std::unique_ptr<VideoDecoder>(fake_decoder_))) {}
+
+ class CountingFakeDecoder : public VideoDecoder {
+ public:
+ bool Configure(const Settings& settings) override {
+ ++configure_count_;
+ return configure_return_value_;
+ }
+
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override {
+ ++decode_count_;
+ return decode_return_code_;
+ }
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override {
+ decode_complete_callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override {
+ ++release_count_;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ const char* ImplementationName() const override { return "fake-decoder"; }
+
+ int configure_count_ = 0;
+ int decode_count_ = 0;
+ bool configure_return_value_ = true;
+ int32_t decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ DecodedImageCallback* decode_complete_callback_ = nullptr;
+ int release_count_ = 0;
+ int reset_count_ = 0;
+ };
+ test::ScopedFieldTrials override_field_trials_;
+ // `fake_decoder_` is owned and released by `fallback_wrapper_`.
+ CountingFakeDecoder* fake_decoder_;
+ std::unique_ptr<VideoDecoder> fallback_wrapper_;
+};
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, InitializesDecoder) {
+ fallback_wrapper_->Configure({});
+ EXPECT_EQ(1, fake_decoder_->configure_count_);
+
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, fake_decoder_->configure_count_)
+ << "Initialized decoder should not be reinitialized.";
+ EXPECT_EQ(1, fake_decoder_->decode_count_);
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ UsesFallbackDecoderAfterAnyInitDecodeFailure) {
+ fake_decoder_->configure_return_value_ = false;
+ fallback_wrapper_->Configure({});
+ EXPECT_EQ(1, fake_decoder_->configure_count_);
+
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, fake_decoder_->configure_count_)
+ << "Should not have attempted reinitializing the fallback decoder on "
+ "keyframe.";
+ // Unfortunately faking a VP8 frame is hard. Rely on no Decode -> using SW
+ // decoder.
+ EXPECT_EQ(0, fake_decoder_->decode_count_)
+ << "Decoder used even though no InitDecode had succeeded.";
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, IsSoftwareFallbackSticky) {
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, fake_decoder_->decode_count_);
+
+ // Software fallback should be sticky, fake_decoder_ shouldn't be used.
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, fake_decoder_->decode_count_)
+ << "Decoder shouldn't be used after failure.";
+
+ // fake_decoder_ should have only been initialized once during the test.
+ EXPECT_EQ(1, fake_decoder_->configure_count_);
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, DoesNotFallbackOnEveryError) {
+ fallback_wrapper_->Configure({});
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EncodedImage encoded_image;
+ EXPECT_EQ(fake_decoder_->decode_return_code_,
+ fallback_wrapper_->Decode(encoded_image, false, -1));
+ EXPECT_EQ(1, fake_decoder_->decode_count_);
+
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(2, fake_decoder_->decode_count_)
+ << "Decoder should be active even though previous decode failed.";
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, UsesHwDecoderAfterReinit) {
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, fake_decoder_->decode_count_);
+
+ fallback_wrapper_->Release();
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(2, fake_decoder_->decode_count_)
+ << "Should not be using fallback after reinit.";
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsReleaseCall) {
+ fallback_wrapper_->Configure({});
+ fallback_wrapper_->Release();
+ EXPECT_EQ(1, fake_decoder_->release_count_);
+
+ fallback_wrapper_->Configure({});
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(2, fake_decoder_->release_count_)
+ << "Decoder should be released during fallback.";
+ fallback_wrapper_->Release();
+ EXPECT_EQ(2, fake_decoder_->release_count_);
+}
+
+// TODO(pbos): Fake a VP8 frame well enough to actually receive a callback from
+// the software decoder.
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ ForwardsRegisterDecodeCompleteCallback) {
+ class FakeDecodedImageCallback : public DecodedImageCallback {
+ int32_t Decoded(VideoFrame& decodedImage) override { return 0; }
+ int32_t Decoded(webrtc::VideoFrame& decodedImage,
+ int64_t decode_time_ms) override {
+ RTC_DCHECK_NOTREACHED();
+ return -1;
+ }
+ void Decoded(webrtc::VideoFrame& decodedImage,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) override {
+ RTC_DCHECK_NOTREACHED();
+ }
+ } callback;
+
+ fallback_wrapper_->Configure({});
+ fallback_wrapper_->RegisterDecodeCompleteCallback(&callback);
+ EXPECT_EQ(&callback, fake_decoder_->decode_complete_callback_);
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ EncodedImage encoded_image;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ // Hard coded expected value since libvpx is the software implementation name
+ // for VP8. Change accordingly if the underlying implementation does.
+ EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
+ fallback_wrapper_->ImplementationName());
+ fallback_wrapper_->Release();
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, FallbacksOnTooManyErrors) {
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ // Doesn't fallback from a single error.
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+
+ // However, after many frames with the same error, fallback should happen.
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ // Hard coded expected value since libvpx is the software implementation name
+ // for VP8. Change accordingly if the underlying implementation does.
+ EXPECT_STREQ("libvpx (fallback from: fake-decoder)",
+ fallback_wrapper_->ImplementationName());
+ fallback_wrapper_->Release();
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ DoesNotFallbackOnDeltaFramesErrors) {
+ fallback_wrapper_->Configure({});
+
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameDelta;
+
+ // Many decoded frames with the same error
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+
+ fallback_wrapper_->Release();
+}
+
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest,
+ DoesNotFallbackOnNonConsecutiveErrors) {
+ fallback_wrapper_->Configure({});
+
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+
+ const int kNumFramesToEncode = 10;
+ for (int i = 0; i < kNumFramesToEncode; ++i) {
+ // Interleaved errors and successful decodes.
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ }
+ EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName());
+ fallback_wrapper_->Release();
+}
+
+class ForcedSoftwareDecoderFallbackTest
+ : public VideoDecoderSoftwareFallbackWrapperTest {
+ public:
+ ForcedSoftwareDecoderFallbackTest()
+ : VideoDecoderSoftwareFallbackWrapperTest(
+ "WebRTC-Video-ForcedSwDecoderFallback/Enabled/") {
+ fake_decoder_ = new CountingFakeDecoder();
+ sw_fallback_decoder_ = new CountingFakeDecoder();
+ fallback_wrapper_ = CreateVideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder>(sw_fallback_decoder_),
+ std::unique_ptr<VideoDecoder>(fake_decoder_));
+ }
+
+ CountingFakeDecoder* sw_fallback_decoder_;
+};
+
+TEST_F(ForcedSoftwareDecoderFallbackTest, UsesForcedFallback) {
+ fallback_wrapper_->Configure({});
+ EXPECT_EQ(1, sw_fallback_decoder_->configure_count_);
+
+ EncodedImage encoded_image;
+ encoded_image._frameType = VideoFrameType::kVideoFrameKey;
+ fallback_wrapper_->Decode(encoded_image, false, -1);
+ EXPECT_EQ(1, sw_fallback_decoder_->configure_count_);
+ EXPECT_EQ(1, sw_fallback_decoder_->decode_count_);
+
+ fallback_wrapper_->Release();
+ EXPECT_EQ(1, sw_fallback_decoder_->release_count_);
+
+ // Only fallback decoder should have been used.
+ EXPECT_EQ(0, fake_decoder_->configure_count_);
+ EXPECT_EQ(0, fake_decoder_->decode_count_);
+ EXPECT_EQ(0, fake_decoder_->release_count_);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/video_encoder_factory_template_tests.cc b/third_party/libwebrtc/api/video_codecs/test/video_encoder_factory_template_tests.cc
new file mode 100644
index 0000000000..4c3d0cd24e
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/video_encoder_factory_template_tests.cc
@@ -0,0 +1,172 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/test/mock_video_encoder.h"
+#include "api/video_codecs/video_encoder_factory_template.h"
+#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
+#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::Contains;
+using ::testing::Each;
+using ::testing::Eq;
+using ::testing::Field;
+using ::testing::IsEmpty;
+using ::testing::Ne;
+using ::testing::Not;
+using ::testing::UnorderedElementsAre;
+
+namespace webrtc {
+namespace {
+using CodecSupport = VideoEncoderFactory::CodecSupport;
+const SdpVideoFormat kFooSdp("Foo");
+const SdpVideoFormat kBarLowSdp("Bar", {{"profile", "low"}});
+const SdpVideoFormat kBarHighSdp("Bar", {{"profile", "high"}});
+
+struct FooEncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() { return {kFooSdp}; }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+ return std::make_unique<testing::StrictMock<MockVideoEncoder>>();
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+ return scalability_mode == ScalabilityMode::kL1T2 ||
+ scalability_mode == ScalabilityMode::kL1T3;
+ }
+};
+
+struct BarEncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return {kBarLowSdp, kBarHighSdp};
+ }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+ return std::make_unique<testing::StrictMock<MockVideoEncoder>>();
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+ return scalability_mode == ScalabilityMode::kL1T2 ||
+ scalability_mode == ScalabilityMode::kL1T3 ||
+ scalability_mode == ScalabilityMode::kS2T1 ||
+ scalability_mode == ScalabilityMode::kS3T3;
+ }
+};
+
+TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCreateEncoder) {
+ VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter> factory;
+ EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp));
+ EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), Eq(nullptr));
+}
+
+TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCodecSupport) {
+ VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter> factory;
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, absl::nullopt),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "L1T2"),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "S3T3"),
+ Field(&CodecSupport::is_supported, false));
+ EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat("FooX"), absl::nullopt),
+ Field(&CodecSupport::is_supported, false));
+}
+
+TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersNoDuplicates) {
+ VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter,
+ FooEncoderTemplateAdapter>
+ factory;
+ EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp));
+}
+
+TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCreateEncoders) {
+ VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter,
+ BarEncoderTemplateAdapter>
+ factory;
+ EXPECT_THAT(factory.GetSupportedFormats(),
+ UnorderedElementsAre(kFooSdp, kBarLowSdp, kBarHighSdp));
+ EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoEncoder(kBarLowSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoEncoder(kBarHighSdp), Ne(nullptr));
+ EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), Eq(nullptr));
+ EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("Bar")), Eq(nullptr));
+}
+
+TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCodecSupport) {
+ VideoEncoderFactoryTemplate<FooEncoderTemplateAdapter,
+ BarEncoderTemplateAdapter>
+ factory;
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, absl::nullopt),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "L1T2"),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "S3T3"),
+ Field(&CodecSupport::is_supported, false));
+ EXPECT_THAT(factory.QueryCodecSupport(kBarLowSdp, absl::nullopt),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kBarHighSdp, absl::nullopt),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kBarLowSdp, "S2T1"),
+ Field(&CodecSupport::is_supported, true));
+ EXPECT_THAT(factory.QueryCodecSupport(kBarHighSdp, "S3T2"),
+ Field(&CodecSupport::is_supported, false));
+}
+
+TEST(VideoEncoderFactoryTemplate, LibvpxVp8) {
+ VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats.size(), 1);
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "VP8"));
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes,
+ Contains(ScalabilityMode::kL1T3)));
+ EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
+}
+
+TEST(VideoEncoderFactoryTemplate, LibvpxVp9) {
+ VideoEncoderFactoryTemplate<LibvpxVp9EncoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats, Not(IsEmpty()));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "VP9")));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes,
+ Contains(ScalabilityMode::kL3T3_KEY))));
+ EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
+}
+
+// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build
+// target remove this #ifdef.
+#if defined(WEBRTC_USE_H264)
+TEST(VideoEncoderFactoryTemplate, OpenH264) {
+ VideoEncoderFactoryTemplate<OpenH264EncoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats, Not(IsEmpty()));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "H264")));
+ EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes,
+ Contains(ScalabilityMode::kL1T3))));
+ EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
+}
+#endif // defined(WEBRTC_USE_H264)
+
+TEST(VideoEncoderFactoryTemplate, LibaomAv1) {
+ VideoEncoderFactoryTemplate<LibaomAv1EncoderTemplateAdapter> factory;
+ auto formats = factory.GetSupportedFormats();
+ EXPECT_THAT(formats.size(), 1);
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "AV1"));
+ EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes,
+ Contains(ScalabilityMode::kL3T3_KEY)));
+ EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), Ne(nullptr));
+}
+
+} // namespace
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/third_party/libwebrtc/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
new file mode 100644
index 0000000000..b213356e7b
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
@@ -0,0 +1,1055 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/fec_controller_override.h"
+#include "api/scoped_refptr.h"
+#include "api/test/mock_video_encoder.h"
+#include "api/video/encoded_image.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/utility/simulcast_rate_allocator.h"
+#include "rtc_base/fake_clock.h"
+#include "test/fake_texture_frame.h"
+#include "test/field_trial.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+using ::testing::_;
+using ::testing::Return;
+
+namespace {
+const int kWidth = 320;
+const int kHeight = 240;
+const int kNumCores = 2;
+const uint32_t kFramerate = 30;
+const size_t kMaxPayloadSize = 800;
+const int kLowThreshold = 10;
+const int kHighThreshold = 20;
+
+const VideoEncoder::Capabilities kCapabilities(false);
+const VideoEncoder::Settings kSettings(kCapabilities,
+ kNumCores,
+ kMaxPayloadSize);
+
+VideoEncoder::EncoderInfo GetEncoderInfoWithTrustedRateController(
+ bool trusted_rate_controller) {
+ VideoEncoder::EncoderInfo info;
+ info.has_trusted_rate_controller = trusted_rate_controller;
+ return info;
+}
+
+VideoEncoder::EncoderInfo GetEncoderInfoWithHardwareAccelerated(
+ bool hardware_accelerated) {
+ VideoEncoder::EncoderInfo info;
+ info.is_hardware_accelerated = hardware_accelerated;
+ return info;
+}
+
+class FakeEncodedImageCallback : public EncodedImageCallback {
+ public:
+ Result OnEncodedImage(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) override {
+ ++callback_count_;
+ return Result(Result::OK, callback_count_);
+ }
+ int callback_count_ = 0;
+};
+} // namespace
+
+class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test {
+ protected:
+ VideoEncoderSoftwareFallbackWrapperTestBase(
+ const std::string& field_trials,
+ std::unique_ptr<VideoEncoder> sw_encoder)
+ : override_field_trials_(field_trials),
+ fake_encoder_(new CountingFakeEncoder()),
+ wrapper_initialized_(false),
+ fallback_wrapper_(CreateVideoEncoderSoftwareFallbackWrapper(
+ std::move(sw_encoder),
+ std::unique_ptr<VideoEncoder>(fake_encoder_),
+ false)) {}
+
+ class CountingFakeEncoder : public VideoEncoder {
+ public:
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override {
+ // Ignored.
+ }
+
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const VideoEncoder::Settings& settings) override {
+ ++init_encode_count_;
+ return init_encode_return_code_;
+ }
+
+ int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) override {
+ ++encode_count_;
+ last_video_frame_ = frame;
+ if (encode_complete_callback_ &&
+ encode_return_code_ == WEBRTC_VIDEO_CODEC_OK) {
+ encode_complete_callback_->OnEncodedImage(EncodedImage(), nullptr);
+ }
+ return encode_return_code_;
+ }
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override {
+ encode_complete_callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t Release() override {
+ ++release_count_;
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ void SetRates(const RateControlParameters& parameters) override {}
+
+ EncoderInfo GetEncoderInfo() const override {
+ ++supports_native_handle_count_;
+ EncoderInfo info;
+ info.scaling_settings = ScalingSettings(kLowThreshold, kHighThreshold);
+ info.supports_native_handle = supports_native_handle_;
+ info.implementation_name = implementation_name_;
+ if (is_qp_trusted_)
+ info.is_qp_trusted = is_qp_trusted_;
+ return info;
+ }
+
+ int init_encode_count_ = 0;
+ int32_t init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ int32_t encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ int encode_count_ = 0;
+ EncodedImageCallback* encode_complete_callback_ = nullptr;
+ int release_count_ = 0;
+ mutable int supports_native_handle_count_ = 0;
+ bool supports_native_handle_ = false;
+ bool is_qp_trusted_ = false;
+ std::string implementation_name_ = "fake-encoder";
+ absl::optional<VideoFrame> last_video_frame_;
+ };
+
+ void InitEncode();
+ void UtilizeFallbackEncoder();
+ void FallbackFromEncodeRequest();
+ void EncodeFrame();
+ void EncodeFrame(int expected_ret);
+ void CheckLastEncoderName(const char* expected_name) {
+ EXPECT_EQ(expected_name,
+ fallback_wrapper_->GetEncoderInfo().implementation_name);
+ }
+
+ test::ScopedFieldTrials override_field_trials_;
+ FakeEncodedImageCallback callback_;
+ // `fake_encoder_` is owned and released by `fallback_wrapper_`.
+ CountingFakeEncoder* fake_encoder_;
+ CountingFakeEncoder* fake_sw_encoder_;
+ bool wrapper_initialized_;
+ std::unique_ptr<VideoEncoder> fallback_wrapper_;
+ VideoCodec codec_ = {};
+ std::unique_ptr<VideoFrame> frame_;
+ std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
+};
+
+class VideoEncoderSoftwareFallbackWrapperTest
+ : public VideoEncoderSoftwareFallbackWrapperTestBase {
+ protected:
+ VideoEncoderSoftwareFallbackWrapperTest()
+ : VideoEncoderSoftwareFallbackWrapperTest(new CountingFakeEncoder()) {}
+ explicit VideoEncoderSoftwareFallbackWrapperTest(
+ CountingFakeEncoder* fake_sw_encoder)
+ : VideoEncoderSoftwareFallbackWrapperTestBase(
+ "",
+ std::unique_ptr<VideoEncoder>(fake_sw_encoder)),
+ fake_sw_encoder_(fake_sw_encoder) {
+ fake_sw_encoder_->implementation_name_ = "fake_sw_encoder";
+ }
+
+ CountingFakeEncoder* fake_sw_encoder_;
+};
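+
+// Note (illustrative, based on the calls in this file): the wrapper factory
+// takes the software fallback encoder first, the primary (e.g. hardware)
+// encoder second, and optionally a prefer_temporal_support flag, e.g.
+//
+//   CreateVideoEncoderSoftwareFallbackWrapper(
+//       std::move(sw_encoder), std::move(hw_encoder),
+//       /*prefer_temporal_support=*/true);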
+
+void VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame() {
+ EncodeFrame(WEBRTC_VIDEO_CODEC_OK);
+}
+
+void VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame(
+ int expected_ret) {
+ rtc::scoped_refptr<I420Buffer> buffer =
+ I420Buffer::Create(codec_.width, codec_.height);
+ I420Buffer::SetBlack(buffer.get());
+ std::vector<VideoFrameType> types(1, VideoFrameType::kVideoFrameKey);
+
+ frame_ =
+ std::make_unique<VideoFrame>(VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(webrtc::kVideoRotation_0)
+ .set_timestamp_us(0)
+ .build());
+ EXPECT_EQ(expected_ret, fallback_wrapper_->Encode(*frame_, &types));
+}
+
+void VideoEncoderSoftwareFallbackWrapperTestBase::InitEncode() {
+ if (!wrapper_initialized_) {
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_);
+ EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_);
+ }
+
+ // Register fake encoder as main.
+ codec_.codecType = kVideoCodecVP8;
+ codec_.maxFramerate = kFramerate;
+ codec_.width = kWidth;
+ codec_.height = kHeight;
+ codec_.VP8()->numberOfTemporalLayers = 1;
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_));
+
+ if (wrapper_initialized_) {
+ fallback_wrapper_->Release();
+ }
+
+ fake_encoder_->init_encode_return_code_ = WEBRTC_VIDEO_CODEC_OK;
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ fallback_wrapper_->InitEncode(&codec_, kSettings));
+
+ if (!wrapper_initialized_) {
+ fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters(
+ rate_allocator_->Allocate(
+ VideoBitrateAllocationParameters(300000, kFramerate)),
+ kFramerate));
+ }
+ wrapper_initialized_ = true;
+}
+
+void VideoEncoderSoftwareFallbackWrapperTestBase::UtilizeFallbackEncoder() {
+ if (!wrapper_initialized_) {
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_);
+ EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_);
+ }
+
+ // Register with failing fake encoder. Should succeed with VP8 fallback.
+ codec_.codecType = kVideoCodecVP8;
+ codec_.maxFramerate = kFramerate;
+ codec_.width = kWidth;
+ codec_.height = kHeight;
+ codec_.VP8()->numberOfTemporalLayers = 1;
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_));
+
+ if (wrapper_initialized_) {
+ fallback_wrapper_->Release();
+ }
+
+ fake_encoder_->init_encode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR;
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ fallback_wrapper_->InitEncode(&codec_, kSettings));
+ fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters(
+ rate_allocator_->Allocate(
+ VideoBitrateAllocationParameters(300000, kFramerate)),
+ kFramerate));
+
+ int callback_count = callback_.callback_count_;
+ int encode_count = fake_encoder_->encode_count_;
+ EncodeFrame();
+ EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+ EXPECT_EQ(callback_count + 1, callback_.callback_count_);
+}
+
+void VideoEncoderSoftwareFallbackWrapperTestBase::FallbackFromEncodeRequest() {
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_);
+ codec_.codecType = kVideoCodecVP8;
+ codec_.maxFramerate = kFramerate;
+ codec_.width = kWidth;
+ codec_.height = kHeight;
+ codec_.VP8()->numberOfTemporalLayers = 1;
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_));
+ if (wrapper_initialized_) {
+ fallback_wrapper_->Release();
+ }
+ fallback_wrapper_->InitEncode(&codec_, kSettings);
+ fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters(
+ rate_allocator_->Allocate(
+ VideoBitrateAllocationParameters(300000, kFramerate)),
+ kFramerate));
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+
+ // Have the non-fallback encoder request a software fallback.
+ fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ int callback_count = callback_.callback_count_;
+ int encode_count = fake_encoder_->encode_count_;
+ EncodeFrame();
+ // Single encode request, which returned failure.
+ EXPECT_EQ(encode_count + 1, fake_encoder_->encode_count_);
+ EXPECT_EQ(callback_count + 1, callback_.callback_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, InitializesEncoder) {
+ VideoCodec codec = {};
+ fallback_wrapper_->InitEncode(&codec, kSettings);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, EncodeRequestsFallback) {
+ FallbackFromEncodeRequest();
+ // After fallback, further encodes shouldn't hit the fake encoder.
+ int encode_count = fake_encoder_->encode_count_;
+ EncodeFrame();
+ EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, CanUtilizeFallbackEncoder) {
+ UtilizeFallbackEncoder();
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ InternalEncoderReleasedDuringFallback) {
+ EXPECT_EQ(0, fake_encoder_->init_encode_count_);
+ EXPECT_EQ(0, fake_encoder_->release_count_);
+
+ InitEncode();
+
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EXPECT_EQ(0, fake_encoder_->release_count_);
+
+ UtilizeFallbackEncoder();
+
+ // One successful InitEncode(), one failed.
+ EXPECT_EQ(2, fake_encoder_->init_encode_count_);
+ EXPECT_EQ(1, fake_encoder_->release_count_);
+
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+
+ // No extra release when the fallback is released.
+ EXPECT_EQ(2, fake_encoder_->init_encode_count_);
+ EXPECT_EQ(1, fake_encoder_->release_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ InternalEncoderNotEncodingDuringFallback) {
+ UtilizeFallbackEncoder();
+ int encode_count = fake_encoder_->encode_count_;
+ EncodeFrame();
+ EXPECT_EQ(encode_count, fake_encoder_->encode_count_);
+
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ CanRegisterCallbackWhileUsingFallbackEncoder) {
+ InitEncode();
+ EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_);
+
+ UtilizeFallbackEncoder();
+
+  // Registering an encode-complete callback should now be forwarded to the
+  // fallback encoder instead of the main encoder.
+ FakeEncodedImageCallback callback2;
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback2);
+ EXPECT_EQ(&callback_, fake_encoder_->encode_complete_callback_);
+
+ // Encoding a frame using the fallback should arrive at the new callback.
+ std::vector<VideoFrameType> types(1, VideoFrameType::kVideoFrameKey);
+ frame_->set_timestamp(frame_->timestamp() + 1000);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types));
+ EXPECT_EQ(callback2.callback_count_, 1);
+
+ // Re-initialize to use the main encoder, the new callback should be in use.
+ InitEncode();
+ EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_);
+
+ frame_->set_timestamp(frame_->timestamp() + 2000);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types));
+ EXPECT_EQ(callback2.callback_count_, 2);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ SupportsNativeHandleForwardedWithoutFallback) {
+ fallback_wrapper_->GetEncoderInfo();
+ EXPECT_EQ(1, fake_encoder_->supports_native_handle_count_);
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ SupportsNativeHandleNotForwardedDuringFallback) {
+  // Fake encoder signals support for native handle; the default (libvpx) does
+  // not.
+ fake_encoder_->supports_native_handle_ = true;
+ EXPECT_TRUE(fallback_wrapper_->GetEncoderInfo().supports_native_handle);
+ UtilizeFallbackEncoder();
+ EXPECT_FALSE(fallback_wrapper_->GetEncoderInfo().supports_native_handle);
+ // Both times, both encoders are queried.
+ EXPECT_EQ(2, fake_encoder_->supports_native_handle_count_);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest, ReportsImplementationName) {
+ codec_.width = kWidth;
+ codec_.height = kHeight;
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_);
+ fallback_wrapper_->InitEncode(&codec_, kSettings);
+ EncodeFrame();
+ CheckLastEncoderName("fake-encoder");
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ IsQpTrustedNotForwardedDuringFallback) {
+  // Fake encoder signals trusted QP; the default (libvpx) does not.
+ fake_encoder_->is_qp_trusted_ = true;
+ EXPECT_TRUE(fake_encoder_->GetEncoderInfo().is_qp_trusted.value_or(false));
+ UtilizeFallbackEncoder();
+ EXPECT_FALSE(fallback_wrapper_->GetEncoderInfo().is_qp_trusted.has_value());
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ ReportsFallbackImplementationName) {
+ UtilizeFallbackEncoder();
+ CheckLastEncoderName(fake_sw_encoder_->implementation_name_.c_str());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ OnEncodeFallbackNativeFrameScaledIfFallbackDoesNotSupportNativeFrames) {
+ fake_encoder_->supports_native_handle_ = true;
+ fake_sw_encoder_->supports_native_handle_ = false;
+ InitEncode();
+ int width = codec_.width * 2;
+ int height = codec_.height * 2;
+ VideoFrame native_frame = test::FakeNativeBuffer::CreateFrame(
+ width, height, 0, 0, VideoRotation::kVideoRotation_0);
+ std::vector<VideoFrameType> types(1, VideoFrameType::kVideoFrameKey);
+ fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ fallback_wrapper_->Encode(native_frame, &types));
+ EXPECT_EQ(1, fake_sw_encoder_->encode_count_);
+ ASSERT_TRUE(fake_sw_encoder_->last_video_frame_.has_value());
+ EXPECT_NE(VideoFrameBuffer::Type::kNative,
+ fake_sw_encoder_->last_video_frame_->video_frame_buffer()->type());
+ EXPECT_EQ(codec_.width, fake_sw_encoder_->last_video_frame_->width());
+ EXPECT_EQ(codec_.height, fake_sw_encoder_->last_video_frame_->height());
+}
+
+TEST_F(VideoEncoderSoftwareFallbackWrapperTest,
+ OnEncodeFallbackNativeFrameForwardedToFallbackIfItSupportsNativeFrames) {
+ fake_encoder_->supports_native_handle_ = true;
+ fake_sw_encoder_->supports_native_handle_ = true;
+ InitEncode();
+ int width = codec_.width * 2;
+ int height = codec_.height * 2;
+ VideoFrame native_frame = test::FakeNativeBuffer::CreateFrame(
+ width, height, 0, 0, VideoRotation::kVideoRotation_0);
+ std::vector<VideoFrameType> types(1, VideoFrameType::kVideoFrameKey);
+ fake_encoder_->encode_return_code_ = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ fallback_wrapper_->Encode(native_frame, &types));
+ EXPECT_EQ(1, fake_sw_encoder_->encode_count_);
+ ASSERT_TRUE(fake_sw_encoder_->last_video_frame_.has_value());
+ EXPECT_EQ(VideoFrameBuffer::Type::kNative,
+ fake_sw_encoder_->last_video_frame_->video_frame_buffer()->type());
+ EXPECT_EQ(native_frame.width(), fake_sw_encoder_->last_video_frame_->width());
+ EXPECT_EQ(native_frame.height(),
+ fake_sw_encoder_->last_video_frame_->height());
+}
+
+namespace {
+const int kBitrateKbps = 200;
+const int kMinPixelsPerFrame = 1;
+const char kFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2";
+} // namespace
+
+class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTestBase {
+ public:
+ explicit ForcedFallbackTest(const std::string& field_trials)
+ : VideoEncoderSoftwareFallbackWrapperTestBase(field_trials,
+ VP8Encoder::Create()) {}
+
+ ~ForcedFallbackTest() override {}
+
+ protected:
+ void SetUp() override {
+ clock_.SetTime(Timestamp::Micros(1234));
+ ConfigureVp8Codec();
+ }
+
+ void TearDown() override {
+ if (wrapper_initialized_) {
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Release());
+ }
+ }
+
+ void ConfigureVp8Codec() {
+ codec_.codecType = kVideoCodecVP8;
+ codec_.maxFramerate = kFramerate;
+ codec_.width = kWidth;
+ codec_.height = kHeight;
+ codec_.VP8()->numberOfTemporalLayers = 1;
+ codec_.VP8()->automaticResizeOn = true;
+ codec_.SetFrameDropEnabled(true);
+ rate_allocator_.reset(new SimulcastRateAllocator(codec_));
+ }
+
+ void InitEncode(int width, int height) {
+ codec_.width = width;
+ codec_.height = height;
+ if (wrapper_initialized_) {
+ fallback_wrapper_->Release();
+ }
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ fallback_wrapper_->InitEncode(&codec_, kSettings));
+ fallback_wrapper_->RegisterEncodeCompleteCallback(&callback_);
+ wrapper_initialized_ = true;
+ SetRateAllocation(kBitrateKbps);
+ }
+
+ void SetRateAllocation(uint32_t bitrate_kbps) {
+ fallback_wrapper_->SetRates(VideoEncoder::RateControlParameters(
+ rate_allocator_->Allocate(
+ VideoBitrateAllocationParameters(bitrate_kbps * 1000, kFramerate)),
+ kFramerate));
+ }
+
+ void EncodeFrameAndVerifyLastName(const char* expected_name) {
+ EncodeFrameAndVerifyLastName(expected_name, WEBRTC_VIDEO_CODEC_OK);
+ }
+
+ void EncodeFrameAndVerifyLastName(const char* expected_name,
+ int expected_ret) {
+ EncodeFrame(expected_ret);
+ CheckLastEncoderName(expected_name);
+ }
+
+ rtc::ScopedFakeClock clock_;
+};
+
+class ForcedFallbackTestEnabled : public ForcedFallbackTest {
+ public:
+ ForcedFallbackTestEnabled()
+ : ForcedFallbackTest(std::string(kFieldTrial) + "/Enabled-" +
+ std::to_string(kMinPixelsPerFrame) + "," +
+ std::to_string(kWidth * kHeight) + ",30000/") {}
+};
+
+class ForcedFallbackTestDisabled : public ForcedFallbackTest {
+ public:
+ ForcedFallbackTestDisabled()
+ : ForcedFallbackTest(std::string(kFieldTrial) + "/Disabled/") {}
+};
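+
+// Illustrative note: with kMinPixelsPerFrame = 1 and kWidth * kHeight =
+// 320 * 240 = 76800, the "Enabled" fixture above builds the trial string
+//   "WebRTC-VP8-Forced-Fallback-Encoder-v2/Enabled-1,76800,30000/"
+// so, per the tests below, fallback is forced only for frames of at most
+// 76800 pixels, while the "Disabled" fixture builds ".../Disabled/".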
+
+TEST_F(ForcedFallbackTestDisabled, NoFallbackWithoutFieldTrial) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIfAtMaxResolutionLimit) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsKeptWhenInitEncodeIsCalled) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+
+ // Re-initialize encoder, still expect fallback.
+ InitEncode(kWidth / 2, kHeight / 2);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_); // No change.
+ EncodeFrameAndVerifyLastName("libvpx");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsEndedWhenResolutionIsTooLarge) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+
+ // Re-initialize encoder with a larger resolution, expect no fallback.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(2, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+}
+
+TEST_F(ForcedFallbackTestEnabled, FallbackIsEndedForNonValidSettings) {
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+
+ // Re-initialize encoder with invalid setting, expect no fallback.
+ codec_.numberOfSimulcastStreams = 2;
+ InitEncode(kWidth, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Re-initialize encoder with valid setting.
+ codec_.numberOfSimulcastStreams = 1;
+ InitEncode(kWidth, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("libvpx");
+}
+
+TEST_F(ForcedFallbackTestEnabled, MultipleStartEndFallback) {
+ const int kNumRuns = 5;
+ for (int i = 1; i <= kNumRuns; ++i) {
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(i, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+ }
+}
+
+TEST_F(ForcedFallbackTestDisabled, GetScaleSettings) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EXPECT_EQ(1, fake_encoder_->init_encode_count_);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Default min pixels per frame should be used.
+ const auto settings = fallback_wrapper_->GetEncoderInfo().scaling_settings;
+ EXPECT_TRUE(settings.thresholds.has_value());
+ EXPECT_EQ(kDefaultMinPixelsPerFrame, settings.min_pixels_per_frame);
+}
+
+TEST_F(ForcedFallbackTestEnabled, GetScaleSettingsWithNoFallback) {
+ // Resolution above max threshold.
+ InitEncode(kWidth + 1, kHeight);
+ EncodeFrameAndVerifyLastName("fake-encoder");
+
+ // Configured min pixels per frame should be used.
+ const auto settings = fallback_wrapper_->GetEncoderInfo().scaling_settings;
+ EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame);
+ ASSERT_TRUE(settings.thresholds);
+ EXPECT_EQ(kLowThreshold, settings.thresholds->low);
+ EXPECT_EQ(kHighThreshold, settings.thresholds->high);
+}
+
+TEST_F(ForcedFallbackTestEnabled, GetScaleSettingsWithFallback) {
+ // Resolution at max threshold.
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+
+ // Configured min pixels per frame should be used.
+ const auto settings = fallback_wrapper_->GetEncoderInfo().scaling_settings;
+ EXPECT_TRUE(settings.thresholds.has_value());
+ EXPECT_EQ(kMinPixelsPerFrame, settings.min_pixels_per_frame);
+}
+
+TEST_F(ForcedFallbackTestEnabled, ScalingDisabledIfResizeOff) {
+ // Resolution at max threshold.
+ codec_.VP8()->automaticResizeOn = false;
+ InitEncode(kWidth, kHeight);
+ EncodeFrameAndVerifyLastName("libvpx");
+
+ // Should be disabled for automatic resize off.
+ const auto settings = fallback_wrapper_->GetEncoderInfo().scaling_settings;
+ EXPECT_FALSE(settings.thresholds.has_value());
+}
+
+TEST(SoftwareFallbackEncoderTest, BothRateControllersNotTrusted) {
+ auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+
+ EXPECT_CALL(*sw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(false)));
+ EXPECT_CALL(*hw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(false)));
+
+ std::unique_ptr<VideoEncoder> wrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_encoder),
+ std::unique_ptr<VideoEncoder>(hw_encoder));
+ EXPECT_FALSE(wrapper->GetEncoderInfo().has_trusted_rate_controller);
+}
+
+TEST(SoftwareFallbackEncoderTest, SwRateControllerTrusted) {
+ auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ EXPECT_CALL(*sw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(true)));
+ EXPECT_CALL(*hw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(false)));
+
+ std::unique_ptr<VideoEncoder> wrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_encoder),
+ std::unique_ptr<VideoEncoder>(hw_encoder));
+ EXPECT_FALSE(wrapper->GetEncoderInfo().has_trusted_rate_controller);
+}
+
+TEST(SoftwareFallbackEncoderTest, HwRateControllerTrusted) {
+ auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ EXPECT_CALL(*sw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(false)));
+ EXPECT_CALL(*hw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(true)));
+
+ std::unique_ptr<VideoEncoder> wrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_encoder),
+ std::unique_ptr<VideoEncoder>(hw_encoder));
+ EXPECT_TRUE(wrapper->GetEncoderInfo().has_trusted_rate_controller);
+
+ VideoCodec codec_ = {};
+ codec_.width = 100;
+ codec_.height = 100;
+ wrapper->InitEncode(&codec_, kSettings);
+
+ // Trigger fallback to software.
+ EXPECT_CALL(*hw_encoder, Encode)
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
+ VideoFrame frame = VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(100, 100))
+ .build();
+ wrapper->Encode(frame, nullptr);
+
+ EXPECT_FALSE(wrapper->GetEncoderInfo().has_trusted_rate_controller);
+}
+
+TEST(SoftwareFallbackEncoderTest, BothRateControllersTrusted) {
+ auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ EXPECT_CALL(*sw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(true)));
+ EXPECT_CALL(*hw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithTrustedRateController(true)));
+
+ std::unique_ptr<VideoEncoder> wrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_encoder),
+ std::unique_ptr<VideoEncoder>(hw_encoder));
+ EXPECT_TRUE(wrapper->GetEncoderInfo().has_trusted_rate_controller);
+}
+
+TEST(SoftwareFallbackEncoderTest, ReportsHardwareAccelerated) {
+ auto* sw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ auto* hw_encoder = new ::testing::NiceMock<MockVideoEncoder>();
+ EXPECT_CALL(*sw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithHardwareAccelerated(false)));
+ EXPECT_CALL(*hw_encoder, GetEncoderInfo())
+ .WillRepeatedly(Return(GetEncoderInfoWithHardwareAccelerated(true)));
+
+ std::unique_ptr<VideoEncoder> wrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_encoder),
+ std::unique_ptr<VideoEncoder>(hw_encoder));
+ EXPECT_TRUE(wrapper->GetEncoderInfo().is_hardware_accelerated);
+
+ VideoCodec codec_ = {};
+ codec_.width = 100;
+ codec_.height = 100;
+ wrapper->InitEncode(&codec_, kSettings);
+
+ // Trigger fallback to software.
+ EXPECT_CALL(*hw_encoder, Encode)
+ .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
+ VideoFrame frame = VideoFrame::Builder()
+ .set_video_frame_buffer(I420Buffer::Create(100, 100))
+ .build();
+ wrapper->Encode(frame, nullptr);
+ EXPECT_FALSE(wrapper->GetEncoderInfo().is_hardware_accelerated);
+}
+
+class PreferTemporalLayersFallbackTest : public ::testing::Test {
+ public:
+ PreferTemporalLayersFallbackTest() {}
+ void SetUp() override {
+ sw_ = new ::testing::NiceMock<MockVideoEncoder>();
+ sw_info_.implementation_name = "sw";
+ EXPECT_CALL(*sw_, GetEncoderInfo).WillRepeatedly([&]() {
+ return sw_info_;
+ });
+ EXPECT_CALL(*sw_, InitEncode(_, _, _))
+ .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK));
+
+ hw_ = new ::testing::NiceMock<MockVideoEncoder>();
+ hw_info_.implementation_name = "hw";
+ EXPECT_CALL(*hw_, GetEncoderInfo()).WillRepeatedly([&]() {
+ return hw_info_;
+ });
+ EXPECT_CALL(*hw_, InitEncode(_, _, _))
+ .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK));
+
+ wrapper_ = CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder>(sw_), std::unique_ptr<VideoEncoder>(hw_),
+ /*prefer_temporal_support=*/true);
+
+ codec_settings.codecType = kVideoCodecVP8;
+ codec_settings.maxFramerate = kFramerate;
+ codec_settings.width = kWidth;
+ codec_settings.height = kHeight;
+ codec_settings.numberOfSimulcastStreams = 1;
+ codec_settings.VP8()->numberOfTemporalLayers = 1;
+ }
+
+ protected:
+ void SetSupportsLayers(VideoEncoder::EncoderInfo* info, bool tl_enabled) {
+ int num_layers = 1;
+ if (tl_enabled) {
+ num_layers = codec_settings.VP8()->numberOfTemporalLayers;
+ }
+ SetNumLayers(info, num_layers);
+ }
+
+ void SetNumLayers(VideoEncoder::EncoderInfo* info, int num_layers) {
+ info->fps_allocation[0].clear();
+ for (int i = 0; i < num_layers; ++i) {
+ info->fps_allocation[0].push_back(
+ VideoEncoder::EncoderInfo::kMaxFramerateFraction >>
+ (num_layers - i - 1));
+ }
+ }
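+
+  // Example (for reference only): with num_layers = 3, fps_allocation[0]
+  // becomes {kMaxFramerateFraction >> 2, kMaxFramerateFraction >> 1,
+  // kMaxFramerateFraction}, i.e. the temporal layers run at 1/4, 1/2 and the
+  // full framerate.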
+
+ VideoCodec codec_settings;
+ ::testing::NiceMock<MockVideoEncoder>* sw_;
+ ::testing::NiceMock<MockVideoEncoder>* hw_;
+ VideoEncoder::EncoderInfo sw_info_;
+ VideoEncoder::EncoderInfo hw_info_;
+ std::unique_ptr<VideoEncoder> wrapper_;
+};
+
+TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenLayersNotUsed) {
+ codec_settings.VP8()->numberOfTemporalLayers = 1;
+ SetSupportsLayers(&hw_info_, true);
+ SetSupportsLayers(&sw_info_, true);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw");
+}
+
+TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenLayersSupported) {
+ codec_settings.VP8()->numberOfTemporalLayers = 2;
+ SetSupportsLayers(&hw_info_, true);
+ SetSupportsLayers(&sw_info_, true);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw");
+}
+
+TEST_F(PreferTemporalLayersFallbackTest,
+ UsesFallbackWhenLayersNotSupportedOnMain) {
+ codec_settings.VP8()->numberOfTemporalLayers = 2;
+ SetSupportsLayers(&hw_info_, false);
+ SetSupportsLayers(&sw_info_, true);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "sw");
+}
+
+TEST_F(PreferTemporalLayersFallbackTest, UsesMainWhenNeitherSupportsTemporal) {
+ codec_settings.VP8()->numberOfTemporalLayers = 2;
+ SetSupportsLayers(&hw_info_, false);
+ SetSupportsLayers(&sw_info_, false);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw");
+}
+
+TEST_F(PreferTemporalLayersFallbackTest, UsesFallbackWhenLayersAreUndefined) {
+ codec_settings.VP8()->numberOfTemporalLayers = 2;
+ SetNumLayers(&hw_info_, 1);
+ SetNumLayers(&sw_info_, 0);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "sw");
+}
+
+TEST_F(PreferTemporalLayersFallbackTest, PrimesEncoderOnSwitch) {
+ codec_settings.VP8()->numberOfTemporalLayers = 2;
+ // Both support temporal layers, will use main one.
+ SetSupportsLayers(&hw_info_, true);
+ SetSupportsLayers(&sw_info_, true);
+
+  // On the first InitEncode() most parameters have no state yet, so no update
+  // calls are expected to be forwarded to either encoder.
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0);
+ EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0);
+
+ EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0);
+ EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0);
+
+  EXPECT_CALL(*hw_, SetRates).Times(0);
+  EXPECT_CALL(*sw_, SetRates).Times(0);
+
+ EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0);
+ EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0);
+
+ EXPECT_CALL(*hw_, OnRttUpdate).Times(0);
+ EXPECT_CALL(*sw_, OnRttUpdate).Times(0);
+
+ EXPECT_CALL(*hw_, OnLossNotification).Times(0);
+ EXPECT_CALL(*sw_, OnLossNotification).Times(0);
+
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw");
+
+ FakeEncodedImageCallback callback1;
+ class DummyFecControllerOverride : public FecControllerOverride {
+ public:
+ void SetFecAllowed(bool fec_allowed) override {}
+ };
+ DummyFecControllerOverride fec_controller_override1;
+ VideoEncoder::RateControlParameters rate_params1;
+ float packet_loss1 = 0.1;
+ int64_t rtt1 = 1;
+ VideoEncoder::LossNotification lntf1;
+
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback(&callback1));
+ EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0);
+ wrapper_->RegisterEncodeCompleteCallback(&callback1);
+
+ EXPECT_CALL(*hw_, SetFecControllerOverride(&fec_controller_override1));
+ EXPECT_CALL(*sw_, SetFecControllerOverride).Times(1);
+ wrapper_->SetFecControllerOverride(&fec_controller_override1);
+
+ EXPECT_CALL(*hw_, SetRates(rate_params1));
+ EXPECT_CALL(*sw_, SetRates).Times(0);
+ wrapper_->SetRates(rate_params1);
+
+ EXPECT_CALL(*hw_, OnPacketLossRateUpdate(packet_loss1));
+ EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0);
+ wrapper_->OnPacketLossRateUpdate(packet_loss1);
+
+ EXPECT_CALL(*hw_, OnRttUpdate(rtt1));
+ EXPECT_CALL(*sw_, OnRttUpdate).Times(0);
+ wrapper_->OnRttUpdate(rtt1);
+
+ EXPECT_CALL(*hw_, OnLossNotification).Times(1);
+ EXPECT_CALL(*sw_, OnLossNotification).Times(0);
+ wrapper_->OnLossNotification(lntf1);
+
+ // Release and re-init, with fallback to software. This should trigger
+ // the software encoder to be primed with the current state.
+ wrapper_->Release();
+ EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback(&callback1));
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0);
+
+ EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0);
+ EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0);
+
+ // Rate control parameters are cleared on InitEncode.
+ EXPECT_CALL(*sw_, SetRates).Times(0);
+ EXPECT_CALL(*hw_, SetRates).Times(0);
+
+ EXPECT_CALL(*sw_, OnPacketLossRateUpdate(packet_loss1));
+ EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0);
+
+ EXPECT_CALL(*sw_, OnRttUpdate(rtt1));
+ EXPECT_CALL(*hw_, OnRttUpdate).Times(0);
+
+ EXPECT_CALL(*sw_, OnLossNotification).Times(1);
+ EXPECT_CALL(*hw_, OnLossNotification).Times(0);
+
+ SetSupportsLayers(&hw_info_, false);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "sw");
+
+ // Update with all-new params for the software encoder.
+ FakeEncodedImageCallback callback2;
+ DummyFecControllerOverride fec_controller_override2;
+ VideoEncoder::RateControlParameters rate_params2;
+ float packet_loss2 = 0.2;
+ int64_t rtt2 = 2;
+ VideoEncoder::LossNotification lntf2;
+
+ EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback(&callback2));
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback).Times(0);
+ wrapper_->RegisterEncodeCompleteCallback(&callback2);
+
+ EXPECT_CALL(*sw_, SetFecControllerOverride(&fec_controller_override2));
+ EXPECT_CALL(*hw_, SetFecControllerOverride).Times(1);
+ wrapper_->SetFecControllerOverride(&fec_controller_override2);
+
+ EXPECT_CALL(*sw_, SetRates(rate_params2));
+ EXPECT_CALL(*hw_, SetRates).Times(0);
+ wrapper_->SetRates(rate_params2);
+
+ EXPECT_CALL(*sw_, OnPacketLossRateUpdate(packet_loss2));
+ EXPECT_CALL(*hw_, OnPacketLossRateUpdate).Times(0);
+ wrapper_->OnPacketLossRateUpdate(packet_loss2);
+
+ EXPECT_CALL(*sw_, OnRttUpdate(rtt2));
+ EXPECT_CALL(*hw_, OnRttUpdate).Times(0);
+ wrapper_->OnRttUpdate(rtt2);
+
+ EXPECT_CALL(*sw_, OnLossNotification).Times(1);
+ EXPECT_CALL(*hw_, OnLossNotification).Times(0);
+ wrapper_->OnLossNotification(lntf2);
+
+ // Release and re-init, back to main encoder. This should trigger
+ // the main encoder to be primed with the current state.
+ wrapper_->Release();
+ EXPECT_CALL(*hw_, RegisterEncodeCompleteCallback(&callback2));
+ EXPECT_CALL(*sw_, RegisterEncodeCompleteCallback).Times(0);
+
+ EXPECT_CALL(*hw_, SetFecControllerOverride).Times(0);
+ EXPECT_CALL(*sw_, SetFecControllerOverride).Times(0);
+
+ // Rate control parameters are cleared on InitEncode.
+ EXPECT_CALL(*sw_, SetRates).Times(0);
+ EXPECT_CALL(*hw_, SetRates).Times(0);
+
+ EXPECT_CALL(*hw_, OnPacketLossRateUpdate(packet_loss2));
+ EXPECT_CALL(*sw_, OnPacketLossRateUpdate).Times(0);
+
+ EXPECT_CALL(*hw_, OnRttUpdate(rtt2));
+ EXPECT_CALL(*sw_, OnRttUpdate).Times(0);
+
+ EXPECT_CALL(*hw_, OnLossNotification).Times(1);
+ EXPECT_CALL(*sw_, OnLossNotification).Times(0);
+
+ SetSupportsLayers(&hw_info_, true);
+ EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
+ wrapper_->InitEncode(&codec_settings, kSettings));
+ EXPECT_EQ(wrapper_->GetEncoderInfo().implementation_name, "hw");
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/video_codec.cc b/third_party/libwebrtc/api/video_codecs/video_codec.cc
new file mode 100644
index 0000000000..c6122d3f6a
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_codec.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_codec.h"
+
+#include <string.h>
+
+#include <string>
+
+#include "absl/strings/match.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+constexpr char kPayloadNameVp8[] = "VP8";
+constexpr char kPayloadNameVp9[] = "VP9";
+constexpr char kPayloadNameAv1[] = "AV1";
+// TODO(bugs.webrtc.org/13166): Remove AV1X when backwards compatibility is not
+// needed.
+constexpr char kPayloadNameAv1x[] = "AV1X";
+constexpr char kPayloadNameH264[] = "H264";
+constexpr char kPayloadNameGeneric[] = "Generic";
+constexpr char kPayloadNameMultiplex[] = "Multiplex";
+} // namespace
+
+bool VideoCodecVP8::operator==(const VideoCodecVP8& other) const {
+ return (numberOfTemporalLayers == other.numberOfTemporalLayers &&
+ denoisingOn == other.denoisingOn &&
+ automaticResizeOn == other.automaticResizeOn &&
+ keyFrameInterval == other.keyFrameInterval);
+}
+
+bool VideoCodecVP9::operator==(const VideoCodecVP9& other) const {
+ return (numberOfTemporalLayers == other.numberOfTemporalLayers &&
+ denoisingOn == other.denoisingOn &&
+ keyFrameInterval == other.keyFrameInterval &&
+ adaptiveQpMode == other.adaptiveQpMode &&
+ automaticResizeOn == other.automaticResizeOn &&
+ numberOfSpatialLayers == other.numberOfSpatialLayers &&
+ flexibleMode == other.flexibleMode);
+}
+
+bool VideoCodecH264::operator==(const VideoCodecH264& other) const {
+ return (keyFrameInterval == other.keyFrameInterval &&
+ numberOfTemporalLayers == other.numberOfTemporalLayers);
+}
+
+VideoCodec::VideoCodec()
+ : codecType(kVideoCodecGeneric),
+ width(0),
+ height(0),
+ startBitrate(0),
+ maxBitrate(0),
+ minBitrate(0),
+ maxFramerate(0),
+ active(true),
+ qpMax(0),
+ numberOfSimulcastStreams(0),
+ simulcastStream(),
+ spatialLayers(),
+ mode(VideoCodecMode::kRealtimeVideo),
+ expect_encode_from_texture(false),
+ timing_frame_thresholds({0, 0}),
+ legacy_conference_mode(false),
+ codec_specific_(),
+ complexity_(VideoCodecComplexity::kComplexityNormal) {}
+
+VideoCodecVP8* VideoCodec::VP8() {
+ RTC_DCHECK_EQ(codecType, kVideoCodecVP8);
+ return &codec_specific_.VP8;
+}
+
+const VideoCodecVP8& VideoCodec::VP8() const {
+ RTC_DCHECK_EQ(codecType, kVideoCodecVP8);
+ return codec_specific_.VP8;
+}
+
+VideoCodecVP9* VideoCodec::VP9() {
+ RTC_DCHECK_EQ(codecType, kVideoCodecVP9);
+ return &codec_specific_.VP9;
+}
+
+const VideoCodecVP9& VideoCodec::VP9() const {
+ RTC_DCHECK_EQ(codecType, kVideoCodecVP9);
+ return codec_specific_.VP9;
+}
+
+VideoCodecH264* VideoCodec::H264() {
+ RTC_DCHECK_EQ(codecType, kVideoCodecH264);
+ return &codec_specific_.H264;
+}
+
+const VideoCodecH264& VideoCodec::H264() const {
+ RTC_DCHECK_EQ(codecType, kVideoCodecH264);
+ return codec_specific_.H264;
+}
+
+const char* CodecTypeToPayloadString(VideoCodecType type) {
+ switch (type) {
+ case kVideoCodecVP8:
+ return kPayloadNameVp8;
+ case kVideoCodecVP9:
+ return kPayloadNameVp9;
+ case kVideoCodecAV1:
+ return kPayloadNameAv1;
+ case kVideoCodecH264:
+ return kPayloadNameH264;
+ case kVideoCodecMultiplex:
+ return kPayloadNameMultiplex;
+ case kVideoCodecGeneric:
+ return kPayloadNameGeneric;
+ }
+ RTC_CHECK_NOTREACHED();
+}
+
+VideoCodecType PayloadStringToCodecType(const std::string& name) {
+ if (absl::EqualsIgnoreCase(name, kPayloadNameVp8))
+ return kVideoCodecVP8;
+ if (absl::EqualsIgnoreCase(name, kPayloadNameVp9))
+ return kVideoCodecVP9;
+ if (absl::EqualsIgnoreCase(name, kPayloadNameAv1) ||
+ absl::EqualsIgnoreCase(name, kPayloadNameAv1x))
+ return kVideoCodecAV1;
+ if (absl::EqualsIgnoreCase(name, kPayloadNameH264))
+ return kVideoCodecH264;
+ if (absl::EqualsIgnoreCase(name, kPayloadNameMultiplex))
+ return kVideoCodecMultiplex;
+ return kVideoCodecGeneric;
+}
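+
+// Illustrative examples of the mapping above: lookups are case-insensitive,
+// unknown names fall back to generic, and the legacy AV1X name does not
+// round-trip:
+//   PayloadStringToCodecType("vp8") == kVideoCodecVP8
+//   PayloadStringToCodecType("AV1X") == kVideoCodecAV1
+//   CodecTypeToPayloadString(kVideoCodecAV1) == "AV1" (never "AV1X")
+//   PayloadStringToCodecType("no-such-codec") == kVideoCodecGeneric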
+
+VideoCodecComplexity VideoCodec::GetVideoEncoderComplexity() const {
+ return complexity_;
+}
+
+void VideoCodec::SetVideoEncoderComplexity(
+ VideoCodecComplexity complexity_setting) {
+ complexity_ = complexity_setting;
+}
+
+bool VideoCodec::GetFrameDropEnabled() const {
+ return frame_drop_enabled_;
+}
+
+void VideoCodec::SetFrameDropEnabled(bool enabled) {
+ frame_drop_enabled_ = enabled;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/video_codec.h b/third_party/libwebrtc/api/video_codecs/video_codec.h
new file mode 100644
index 0000000000..10bceda0d2
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_codec.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_CODEC_H_
+#define API_VIDEO_CODECS_VIDEO_CODEC_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_codec_type.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "api/video_codecs/simulcast_stream.h"
+#include "api/video_codecs/spatial_layer.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// The VideoCodec class represents an old de-facto API, which we are slowly
+// migrating away from.
+
+// Video codec
+enum class VideoCodecComplexity {
+ kComplexityLow = -1,
+ kComplexityNormal = 0,
+ kComplexityHigh = 1,
+ kComplexityHigher = 2,
+ kComplexityMax = 3
+};
+
+// VP8 specific
+struct VideoCodecVP8 {
+ bool operator==(const VideoCodecVP8& other) const;
+ bool operator!=(const VideoCodecVP8& other) const {
+ return !(*this == other);
+ }
+  // Temporary utility method to ease the transition away from the
+  // numberOfTemporalLayers setting (replaced by ScalabilityMode).
+ void SetNumberOfTemporalLayers(unsigned char n) {
+ numberOfTemporalLayers = n;
+ }
+ unsigned char numberOfTemporalLayers;
+ bool denoisingOn;
+ bool automaticResizeOn;
+ int keyFrameInterval;
+};
+
+enum class InterLayerPredMode : int {
+ kOff = 0, // Inter-layer prediction is disabled.
+ kOn = 1, // Inter-layer prediction is enabled.
+ kOnKeyPic = 2 // Inter-layer prediction is enabled but limited to key frames.
+};
+
+// VP9 specific.
+struct VideoCodecVP9 {
+ bool operator==(const VideoCodecVP9& other) const;
+ bool operator!=(const VideoCodecVP9& other) const {
+ return !(*this == other);
+ }
+  // Temporary utility method to ease the transition away from the
+  // numberOfTemporalLayers setting (replaced by ScalabilityMode).
+ void SetNumberOfTemporalLayers(unsigned char n) {
+ numberOfTemporalLayers = n;
+ }
+ unsigned char numberOfTemporalLayers;
+ bool denoisingOn;
+ int keyFrameInterval;
+ bool adaptiveQpMode;
+ bool automaticResizeOn;
+ unsigned char numberOfSpatialLayers;
+ bool flexibleMode;
+ InterLayerPredMode interLayerPred;
+};
+
+// H264 specific.
+struct VideoCodecH264 {
+ bool operator==(const VideoCodecH264& other) const;
+ bool operator!=(const VideoCodecH264& other) const {
+ return !(*this == other);
+ }
+  // Temporary utility method to ease the transition away from the
+  // numberOfTemporalLayers setting (replaced by ScalabilityMode).
+ void SetNumberOfTemporalLayers(unsigned char n) {
+ numberOfTemporalLayers = n;
+ }
+ int keyFrameInterval;
+ uint8_t numberOfTemporalLayers;
+};
+
+// Translates from name of codec to codec type and vice versa.
+RTC_EXPORT const char* CodecTypeToPayloadString(VideoCodecType type);
+RTC_EXPORT VideoCodecType PayloadStringToCodecType(const std::string& name);
+
+union VideoCodecUnion {
+ VideoCodecVP8 VP8;
+ VideoCodecVP9 VP9;
+ VideoCodecH264 H264;
+};
+
+enum class VideoCodecMode { kRealtimeVideo, kScreensharing };
+
+// Common video codec properties
+class RTC_EXPORT VideoCodec {
+ public:
+ VideoCodec();
+
+ // Scalability mode as described in
+ // https://www.w3.org/TR/webrtc-svc/#scalabilitymodes*
+ absl::optional<ScalabilityMode> GetScalabilityMode() const {
+ return scalability_mode_;
+ }
+ void SetScalabilityMode(ScalabilityMode scalability_mode) {
+ scalability_mode_ = scalability_mode;
+ }
+ void UnsetScalabilityMode() { scalability_mode_ = absl::nullopt; }
+
+ VideoCodecComplexity GetVideoEncoderComplexity() const;
+ void SetVideoEncoderComplexity(VideoCodecComplexity complexity_setting);
+
+ bool GetFrameDropEnabled() const;
+ void SetFrameDropEnabled(bool enabled);
+
+ // Public variables. TODO(hta): Make them private with accessors.
+ VideoCodecType codecType;
+
+ // TODO(nisse): Change to int, for consistency.
+ uint16_t width;
+ uint16_t height;
+
+ unsigned int startBitrate; // kilobits/sec.
+ unsigned int maxBitrate; // kilobits/sec.
+ unsigned int minBitrate; // kilobits/sec.
+
+ uint32_t maxFramerate;
+
+  // This enables/disables encoding and sending when there aren't multiple
+  // simulcast streams, by allocating 0 bitrate if inactive.
+ bool active;
+
+ unsigned int qpMax;
+ unsigned char numberOfSimulcastStreams;
+ SimulcastStream simulcastStream[kMaxSimulcastStreams];
+ SpatialLayer spatialLayers[kMaxSpatialLayers];
+
+ VideoCodecMode mode;
+ bool expect_encode_from_texture;
+
+  // Timing frames configuration. There is a delay of delay_ms between two
+  // consecutive timing frames, excluding outliers. A frame is always made a
+  // timing frame if it is at least outlier_ratio percent of the "ideal"
+  // average frame given the bitrate and framerate, i.e. if it is bigger than
+  // |outlier_ratio / 100.0 * bitrate_bps / fps| in bits. This way, timing
+  // frames are usually not sent too often, yet large frames always carry
+  // timing information for debugging purposes, because they are more likely
+  // to cause extra delays.
+ struct TimingFrameTriggerThresholds {
+ int64_t delay_ms;
+ uint16_t outlier_ratio_percent;
+ } timing_frame_thresholds;
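+
+  // Worked example (illustrative numbers only): at 900000 bps and 30 fps the
+  // "ideal" average frame is 900000 / 30 = 30000 bits, so with
+  // outlier_ratio_percent = 500 every frame larger than
+  // 500 / 100.0 * 30000 = 150000 bits is made a timing frame, in addition to
+  // the regular timing frame sent every delay_ms.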
+
+ // Legacy Google conference mode flag for simulcast screenshare
+ bool legacy_conference_mode;
+
+ bool operator==(const VideoCodec& other) const = delete;
+ bool operator!=(const VideoCodec& other) const = delete;
+
+ // Accessors for codec specific information.
+ // There is a const version of each that returns a reference,
+ // and a non-const version that returns a pointer, in order
+ // to allow modification of the parameters.
+ VideoCodecVP8* VP8();
+ const VideoCodecVP8& VP8() const;
+ VideoCodecVP9* VP9();
+ const VideoCodecVP9& VP9() const;
+ VideoCodecH264* H264();
+ const VideoCodecH264& H264() const;
+
+ private:
+ // TODO(hta): Consider replacing the union with a pointer type.
+ // This will allow removing the VideoCodec* types from this file.
+ VideoCodecUnion codec_specific_;
+ absl::optional<ScalabilityMode> scalability_mode_;
+  // 'complexity_' indicates the CPU capability of the client. It's used to
+  // determine encoder CPU complexity (e.g., cpu_used for VP8, VP9, and AV1).
+ VideoCodecComplexity complexity_;
+ bool frame_drop_enabled_ = false;
+};
+
+} // namespace webrtc
+#endif // API_VIDEO_CODECS_VIDEO_CODEC_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build b/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build
new file mode 100644
index 0000000000..eccdc6cfc7
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video_codecs/av1_profile.cc",
+ "/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc",
+ "/third_party/libwebrtc/api/video_codecs/sdp_video_format.cc",
+ "/third_party/libwebrtc/api/video_codecs/simulcast_stream.cc",
+ "/third_party/libwebrtc/api/video_codecs/spatial_layer.cc",
+ "/third_party/libwebrtc/api/video_codecs/video_codec.cc",
+ "/third_party/libwebrtc/api/video_codecs/video_decoder.cc",
+ "/third_party/libwebrtc/api/video_codecs/video_encoder.cc",
+ "/third_party/libwebrtc/api/video_codecs/vp8_frame_config.cc",
+ "/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.cc",
+ "/third_party/libwebrtc/api/video_codecs/vp9_profile.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_codecs_api_gn")
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder.cc b/third_party/libwebrtc/api/video_codecs/video_decoder.cc
new file mode 100644
index 0000000000..c8f40cee7f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder.cc
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_decoder.h"
+
+#include "absl/types/optional.h"
+#include "api/video/render_resolution.h"
+#include "api/video/video_codec_type.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+int32_t DecodedImageCallback::Decoded(VideoFrame& decodedImage,
+ int64_t decode_time_ms) {
+  // The default implementation ignores the custom decode time value.
+ return Decoded(decodedImage);
+}
+
+void DecodedImageCallback::Decoded(VideoFrame& decodedImage,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp) {
+ Decoded(decodedImage, decode_time_ms.value_or(-1));
+}
+
+VideoDecoder::DecoderInfo VideoDecoder::GetDecoderInfo() const {
+ DecoderInfo info;
+ info.implementation_name = ImplementationName();
+ return info;
+}
+
+const char* VideoDecoder::ImplementationName() const {
+ return "unknown";
+}
+
+std::string VideoDecoder::DecoderInfo::ToString() const {
+ char string_buf[2048];
+ rtc::SimpleStringBuilder oss(string_buf);
+
+ oss << "DecoderInfo { "
+ << "prefers_late_decoding = "
+ << "implementation_name = '" << implementation_name << "', "
+ << "is_hardware_accelerated = "
+ << (is_hardware_accelerated ? "true" : "false") << " }";
+ return oss.str();
+}
+
+bool VideoDecoder::DecoderInfo::operator==(const DecoderInfo& rhs) const {
+ return is_hardware_accelerated == rhs.is_hardware_accelerated &&
+ implementation_name == rhs.implementation_name;
+}
+
+void VideoDecoder::Settings::set_number_of_cores(int value) {
+ RTC_DCHECK_GT(value, 0);
+ number_of_cores_ = value;
+}
+
+} // namespace webrtc
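A minimal sketch of how this callback interface might be implemented (the class name ExampleFrameSink is illustrative, not part of the patch): only the single-argument Decoded() is required, and the decode-time and QP overloads fall back to the defaults defined in video_decoder.cc above.

    #include "api/video/video_frame.h"
    #include "api/video_codecs/video_decoder.h"

    // Hypothetical sink: only the required overload is implemented; the other
    // Decoded() overloads use the default forwarding behavior shown above.
    class ExampleFrameSink : public webrtc::DecodedImageCallback {
     public:
      int32_t Decoded(webrtc::VideoFrame& decoded_image) override {
        // Hand the frame off to rendering; 0 signals success.
        return 0;
      }
    };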
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder.h b/third_party/libwebrtc/api/video_codecs/video_decoder.h
new file mode 100644
index 0000000000..aa7ee24307
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video/encoded_image.h"
+#include "api/video/render_resolution.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class RTC_EXPORT DecodedImageCallback {
+ public:
+ virtual ~DecodedImageCallback() {}
+
+ virtual int32_t Decoded(VideoFrame& decodedImage) = 0;
+ // Provides an alternative interface that allows the decoder to specify the
+ // decode time excluding waiting time for any previous pending frame to
+ // return. This is necessary for breaking positive feedback in the delay
+ // estimation when the decoder has a single output buffer.
+ virtual int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms);
+
+ // TODO(sakal): Remove other implementations when upstream projects have been
+ // updated.
+ virtual void Decoded(VideoFrame& decodedImage,
+ absl::optional<int32_t> decode_time_ms,
+ absl::optional<uint8_t> qp);
+};
+
+class RTC_EXPORT VideoDecoder {
+ public:
+ struct DecoderInfo {
+ // Descriptive name of the decoder implementation.
+ std::string implementation_name;
+
+ // True if the decoder is backed by hardware acceleration.
+ bool is_hardware_accelerated = false;
+
+ std::string ToString() const;
+ bool operator==(const DecoderInfo& rhs) const;
+ bool operator!=(const DecoderInfo& rhs) const { return !(*this == rhs); }
+ };
+
+ class Settings {
+ public:
+ Settings() = default;
+ Settings(const Settings&) = default;
+ Settings& operator=(const Settings&) = default;
+ ~Settings() = default;
+
+    // The size of the pool used to store video frame buffers inside the
+    // decoder. If the value isn't present, a codec-default value will be
+    // used. If the value is present and the decoder doesn't have a buffer
+    // pool, the value will be ignored.
+ absl::optional<int> buffer_pool_size() const;
+ void set_buffer_pool_size(absl::optional<int> value);
+
+    // When valid, the user of the VideoDecoder interface shouldn't `Decode`
+    // encoded images with a render resolution larger than the width and
+    // height specified here.
+ RenderResolution max_render_resolution() const;
+ void set_max_render_resolution(RenderResolution value);
+
+ // Maximum number of cpu cores the decoder is allowed to use in parallel.
+ // Must be positive.
+ int number_of_cores() const { return number_of_cores_; }
+ void set_number_of_cores(int value);
+
+    // Codec of the encoded images that the user of the VideoDecoder interface will `Decode`.
+ VideoCodecType codec_type() const { return codec_type_; }
+ void set_codec_type(VideoCodecType value) { codec_type_ = value; }
+
+ private:
+ absl::optional<int> buffer_pool_size_;
+ RenderResolution max_resolution_;
+ int number_of_cores_ = 1;
+ VideoCodecType codec_type_ = kVideoCodecGeneric;
+ };
+
+ virtual ~VideoDecoder() = default;
+
+  // Prepares the decoder to handle incoming encoded frames. Can be called
+  // multiple times; in that case only the latest `settings` are in effect.
+ virtual bool Configure(const Settings& settings) = 0;
+
+ virtual int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) = 0;
+
+ virtual int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) = 0;
+
+ virtual int32_t Release() = 0;
+
+ virtual DecoderInfo GetDecoderInfo() const;
+
+ // Deprecated, use GetDecoderInfo().implementation_name instead.
+ virtual const char* ImplementationName() const;
+};
+
+inline absl::optional<int> VideoDecoder::Settings::buffer_pool_size() const {
+ return buffer_pool_size_;
+}
+
+inline void VideoDecoder::Settings::set_buffer_pool_size(
+ absl::optional<int> value) {
+ buffer_pool_size_ = value;
+}
+
+inline RenderResolution VideoDecoder::Settings::max_render_resolution() const {
+ return max_resolution_;
+}
+
+inline void VideoDecoder::Settings::set_max_render_resolution(
+ RenderResolution value) {
+ max_resolution_ = value;
+}
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_H_
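A minimal sketch of configuring a decoder through the Settings class declared above (the function name and the concrete values are illustrative; RenderResolution is assumed to provide a (width, height) constructor):

    #include "api/video/render_resolution.h"
    #include "api/video_codecs/video_decoder.h"

    // Illustrative setup of a decoder obtained from some VideoDecoderFactory.
    bool ConfigureVp8Decoder(webrtc::VideoDecoder& decoder) {
      webrtc::VideoDecoder::Settings settings;
      settings.set_codec_type(webrtc::kVideoCodecVP8);
      // Must be positive (enforced by an RTC_DCHECK in video_decoder.cc).
      settings.set_number_of_cores(2);
      // Upper bound on the resolution of images passed to Decode().
      settings.set_max_render_resolution(webrtc::RenderResolution(1280, 720));
      // Optional; ignored by decoders without an internal buffer pool.
      settings.set_buffer_pool_size(10);
      return decoder.Configure(settings);
    }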
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory.h
new file mode 100644
index 0000000000..7e1d2ee883
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class VideoDecoder;
+
+// A factory that creates VideoDecoders.
+// NOTE: This class is still under development and may change without notice.
+class RTC_EXPORT VideoDecoderFactory {
+ public:
+ struct CodecSupport {
+ bool is_supported = false;
+ bool is_power_efficient = false;
+ };
+
+ // Returns a list of supported video formats in order of preference, to use
+ // for signaling etc.
+ virtual std::vector<SdpVideoFormat> GetSupportedFormats() const = 0;
+
+  // Query whether the specified format is supported or not and if it will be
+ // power efficient, which is currently interpreted as if there is support for
+ // hardware acceleration.
+ // The parameter `reference_scaling` is used to query support for prediction
+ // across spatial layers. An example where support for reference scaling is
+ // needed is if the video stream is produced with a scalability mode that has
+ // a dependency between the spatial layers. See
+ // https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification of
+  // different scalability modes. NOTE: QueryCodecSupport is currently an
+ // experimental feature that is subject to change without notice.
+ virtual CodecSupport QueryCodecSupport(const SdpVideoFormat& format,
+ bool reference_scaling) const {
+ // Default implementation, query for supported formats and check if the
+ // specified format is supported. Returns false if `reference_scaling` is
+ // true.
+ CodecSupport codec_support;
+ codec_support.is_supported =
+ !reference_scaling && format.IsCodecInList(GetSupportedFormats());
+ return codec_support;
+ }
+
+ // Creates a VideoDecoder for the specified format.
+ virtual std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) = 0;
+
+ virtual ~VideoDecoderFactory() {}
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_H_
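A short sketch of how the default QueryCodecSupport() behaves: a format counts as supported only if it appears in GetSupportedFormats() and no reference scaling is requested (the helper function name is illustrative):

    #include "api/video_codecs/sdp_video_format.h"
    #include "api/video_codecs/video_decoder_factory.h"

    // With the default implementation above, requesting reference scaling
    // always yields is_supported == false; factories backed by SVC-capable
    // decoders are expected to override QueryCodecSupport().
    bool SupportsVp9(webrtc::VideoDecoderFactory& factory,
                     bool reference_scaling) {
      webrtc::VideoDecoderFactory::CodecSupport support =
          factory.QueryCodecSupport(webrtc::SdpVideoFormat("VP9"),
                                    reference_scaling);
      return support.is_supported;
    }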
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template.h
new file mode 100644
index 0000000000..703ae11664
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "api/array_view.h"
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_decoder_factory.h"
+
+namespace webrtc {
+// The VideoDecoderFactoryTemplate supports decoder implementations given as
+// template arguments.
+//
+// To include a decoder in the factory, it must define two static member
+// functions:
+//
+// // Returns the supported SdpVideoFormats this decoder can decode.
+// static std::vector<SdpVideoFormat> SupportedFormats();
+//
+// // Creates a decoder instance for the given format.
+// static std::unique_ptr<VideoDecoder>
+// CreateDecoder(const SdpVideoFormat& format);
+//
+// Note that the order of the template arguments matters, as the factory will
+// return the first decoder implementation supporting the given SdpVideoFormat.
+template <typename... Ts>
+class VideoDecoderFactoryTemplate : public VideoDecoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ return GetSupportedFormatsInternal<Ts...>();
+ }
+
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override {
+ return CreateVideoDecoderInternal<Ts...>(format);
+ }
+
+ private:
+ bool IsFormatInList(
+ const SdpVideoFormat& format,
+ rtc::ArrayView<const SdpVideoFormat> supported_formats) const {
+ return absl::c_any_of(
+ supported_formats, [&](const SdpVideoFormat& supported_format) {
+ return supported_format.name == format.name &&
+ supported_format.parameters == format.parameters;
+ });
+ }
+
+ template <typename V, typename... Vs>
+ std::vector<SdpVideoFormat> GetSupportedFormatsInternal() const {
+ auto supported_formats = V::SupportedFormats();
+
+ if constexpr (sizeof...(Vs) > 0) {
+ // Supported formats may overlap between implementations, so duplicates
+ // should be filtered out.
+ for (const auto& other_format : GetSupportedFormatsInternal<Vs...>()) {
+ if (!IsFormatInList(other_format, supported_formats)) {
+ supported_formats.push_back(other_format);
+ }
+ }
+ }
+
+ return supported_formats;
+ }
+
+ template <typename V, typename... Vs>
+ std::unique_ptr<VideoDecoder> CreateVideoDecoderInternal(
+ const SdpVideoFormat& format) {
+ if (IsFormatInList(format, V::SupportedFormats())) {
+ return V::CreateDecoder(format);
+ }
+
+ if constexpr (sizeof...(Vs) > 0) {
+ return CreateVideoDecoderInternal<Vs...>(format);
+ }
+
+ return nullptr;
+ }
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_
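One way the templated factory can be composed, using the adapter headers added later in this patch (the alias and function names below are illustrative):

    #include <memory>

    #include "api/video_codecs/video_decoder_factory_template.h"
    #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
    #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
    #include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
    #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"

    // Earlier template arguments win when supported formats overlap, so the
    // adapters are listed in order of preference.
    using ExampleDecoderFactory = webrtc::VideoDecoderFactoryTemplate<
        webrtc::LibvpxVp8DecoderTemplateAdapter,
        webrtc::LibvpxVp9DecoderTemplateAdapter,
        webrtc::Dav1dDecoderTemplateAdapter,
        webrtc::OpenH264DecoderTemplateAdapter>;

    std::unique_ptr<webrtc::VideoDecoderFactory> CreateExampleDecoderFactory() {
      return std::make_unique<ExampleDecoderFactory>();
    }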
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h
new file mode 100644
index 0000000000..6d80cadf83
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
+
+namespace webrtc {
+struct Dav1dDecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return {SdpVideoFormat("AV1")};
+ }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+ return CreateDav1dDecoder();
+ }
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_DAV1D_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h
new file mode 100644
index 0000000000..0c45a4b622
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+
+namespace webrtc {
+struct LibvpxVp8DecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return {SdpVideoFormat("VP8")};
+ }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+ return VP8Decoder::Create();
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h
new file mode 100644
index 0000000000..e0ec0010be
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+namespace webrtc {
+struct LibvpxVp9DecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return SupportedVP9DecoderCodecs();
+ }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+ return VP9Decoder::Create();
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h
new file mode 100644
index 0000000000..2746bde132
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "modules/video_coding/codecs/h264/include/h264.h"
+
+namespace webrtc {
+// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build
+// target remove #ifdefs.
+struct OpenH264DecoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+#if defined(WEBRTC_USE_H264)
+
+ return SupportedH264DecoderCodecs();
+#else
+ return {};
+#endif
+ }
+
+ static std::unique_ptr<VideoDecoder> CreateDecoder(
+ const SdpVideoFormat& format) {
+#if defined(WEBRTC_USE_H264)
+
+ return H264Decoder::Create();
+#else
+ return nullptr;
+#endif
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc
new file mode 100644
index 0000000000..cf6f823b92
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc
@@ -0,0 +1,284 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_decoder_software_fallback_wrapper.h"
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/video/encoded_image.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/trace_event.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace {
+
+constexpr size_t kMaxConsequtiveHwErrors = 4;
+
+class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder {
+ public:
+ VideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder> sw_fallback_decoder,
+ std::unique_ptr<VideoDecoder> hw_decoder);
+ ~VideoDecoderSoftwareFallbackWrapper() override;
+
+ bool Configure(const Settings& settings) override;
+
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ DecoderInfo GetDecoderInfo() const override;
+ const char* ImplementationName() const override;
+
+ private:
+ bool InitFallbackDecoder();
+ void UpdateFallbackDecoderHistograms();
+
+ bool InitHwDecoder();
+
+ VideoDecoder& active_decoder() const;
+
+ // Determines if we are trying to use the HW or SW decoder.
+ enum class DecoderType {
+ kNone,
+ kHardware,
+ kFallback,
+ } decoder_type_;
+ std::unique_ptr<VideoDecoder> hw_decoder_;
+
+ Settings decoder_settings_;
+ const std::unique_ptr<VideoDecoder> fallback_decoder_;
+ const std::string fallback_implementation_name_;
+ DecodedImageCallback* callback_;
+ int32_t hw_decoded_frames_since_last_fallback_;
+ size_t hw_consequtive_generic_errors_;
+};
+
+VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder> sw_fallback_decoder,
+ std::unique_ptr<VideoDecoder> hw_decoder)
+ : decoder_type_(DecoderType::kNone),
+ hw_decoder_(std::move(hw_decoder)),
+ fallback_decoder_(std::move(sw_fallback_decoder)),
+ fallback_implementation_name_(
+ fallback_decoder_->GetDecoderInfo().implementation_name +
+ " (fallback from: " +
+ hw_decoder_->GetDecoderInfo().implementation_name + ")"),
+ callback_(nullptr),
+ hw_decoded_frames_since_last_fallback_(0),
+ hw_consequtive_generic_errors_(0) {}
+VideoDecoderSoftwareFallbackWrapper::~VideoDecoderSoftwareFallbackWrapper() =
+ default;
+
+bool VideoDecoderSoftwareFallbackWrapper::Configure(const Settings& settings) {
+ decoder_settings_ = settings;
+
+ if (webrtc::field_trial::IsEnabled("WebRTC-Video-ForcedSwDecoderFallback")) {
+ RTC_LOG(LS_INFO) << "Forced software decoder fallback enabled.";
+ RTC_DCHECK(decoder_type_ == DecoderType::kNone);
+ return InitFallbackDecoder();
+ }
+ if (InitHwDecoder()) {
+ return true;
+ }
+
+ RTC_DCHECK(decoder_type_ == DecoderType::kNone);
+ return InitFallbackDecoder();
+}
+
+bool VideoDecoderSoftwareFallbackWrapper::InitHwDecoder() {
+ RTC_DCHECK(decoder_type_ == DecoderType::kNone);
+ if (!hw_decoder_->Configure(decoder_settings_)) {
+ return false;
+ }
+
+ decoder_type_ = DecoderType::kHardware;
+ if (callback_)
+ hw_decoder_->RegisterDecodeCompleteCallback(callback_);
+ return true;
+}
+
+bool VideoDecoderSoftwareFallbackWrapper::InitFallbackDecoder() {
+ RTC_DCHECK(decoder_type_ == DecoderType::kNone ||
+ decoder_type_ == DecoderType::kHardware);
+ RTC_LOG(LS_WARNING) << "Decoder falling back to software decoding.";
+ if (!fallback_decoder_->Configure(decoder_settings_)) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize software-decoder fallback.";
+ return false;
+ }
+
+ UpdateFallbackDecoderHistograms();
+
+ if (decoder_type_ == DecoderType::kHardware) {
+ hw_decoder_->Release();
+ }
+ decoder_type_ = DecoderType::kFallback;
+
+ if (callback_)
+ fallback_decoder_->RegisterDecodeCompleteCallback(callback_);
+ return true;
+}
+
+void VideoDecoderSoftwareFallbackWrapper::UpdateFallbackDecoderHistograms() {
+ const std::string kFallbackHistogramsUmaPrefix =
+ "WebRTC.Video.HardwareDecodedFramesBetweenSoftwareFallbacks.";
+  // Each histogram needs its own code path for this to work; otherwise the
+  // histogram names will be mixed up by the optimization that takes place.
+ switch (decoder_settings_.codec_type()) {
+ case kVideoCodecGeneric:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Generic",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ case kVideoCodecVP8:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Vp8",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ case kVideoCodecVP9:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Vp9",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ case kVideoCodecAV1:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Av1",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ case kVideoCodecH264:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H264",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ case kVideoCodecMultiplex:
+ RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Multiplex",
+ hw_decoded_frames_since_last_fallback_);
+ break;
+ }
+}
+
+int32_t VideoDecoderSoftwareFallbackWrapper::Decode(
+ const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ TRACE_EVENT0("webrtc", "VideoDecoderSoftwareFallbackWrapper::Decode");
+ switch (decoder_type_) {
+ case DecoderType::kNone:
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ case DecoderType::kHardware: {
+ int32_t ret = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ ret = hw_decoder_->Decode(input_image, missing_frames, render_time_ms);
+ if (ret != WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE) {
+ if (ret != WEBRTC_VIDEO_CODEC_ERROR) {
+ ++hw_decoded_frames_since_last_fallback_;
+ hw_consequtive_generic_errors_ = 0;
+ return ret;
+ }
+ if (input_image._frameType == VideoFrameType::kVideoFrameKey) {
+          // Only count errors on key-frames, since generic errors can happen
+          // with the hw decoder for many arbitrary reasons. However,
+          // requesting a key-frame is supposed to fix the issue.
+ ++hw_consequtive_generic_errors_;
+ }
+ if (hw_consequtive_generic_errors_ < kMaxConsequtiveHwErrors) {
+ return ret;
+ }
+ }
+
+      // The HW decoder returned WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE, or too
+      // many generic errors were encountered on key-frames.
+ if (!InitFallbackDecoder()) {
+ return ret;
+ }
+
+ // Fallback decoder initialized, fall-through.
+ [[fallthrough]];
+ }
+ case DecoderType::kFallback:
+ return fallback_decoder_->Decode(input_image, missing_frames,
+ render_time_ms);
+ default:
+ RTC_DCHECK_NOTREACHED();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+}
+
+int32_t VideoDecoderSoftwareFallbackWrapper::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return active_decoder().RegisterDecodeCompleteCallback(callback);
+}
+
+int32_t VideoDecoderSoftwareFallbackWrapper::Release() {
+ int32_t status;
+ switch (decoder_type_) {
+ case DecoderType::kHardware:
+ status = hw_decoder_->Release();
+ break;
+ case DecoderType::kFallback:
+ RTC_LOG(LS_INFO) << "Releasing software fallback decoder.";
+ status = fallback_decoder_->Release();
+ break;
+ case DecoderType::kNone:
+ status = WEBRTC_VIDEO_CODEC_OK;
+ break;
+ default:
+ RTC_DCHECK_NOTREACHED();
+ status = WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ decoder_type_ = DecoderType::kNone;
+ return status;
+}
+
+VideoDecoder::DecoderInfo VideoDecoderSoftwareFallbackWrapper::GetDecoderInfo()
+ const {
+ DecoderInfo info = active_decoder().GetDecoderInfo();
+ if (decoder_type_ == DecoderType::kFallback) {
+ // Cached "A (fallback from B)" string.
+ info.implementation_name = fallback_implementation_name_;
+ }
+ return info;
+}
+
+const char* VideoDecoderSoftwareFallbackWrapper::ImplementationName() const {
+ if (decoder_type_ == DecoderType::kFallback) {
+ // Cached "A (fallback from B)" string.
+ return fallback_implementation_name_.c_str();
+ } else {
+ return hw_decoder_->ImplementationName();
+ }
+}
+
+VideoDecoder& VideoDecoderSoftwareFallbackWrapper::active_decoder() const {
+ return decoder_type_ == DecoderType::kFallback ? *fallback_decoder_
+ : *hw_decoder_;
+}
+
+} // namespace
+
+std::unique_ptr<VideoDecoder> CreateVideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder> sw_fallback_decoder,
+ std::unique_ptr<VideoDecoder> hw_decoder) {
+ return std::make_unique<VideoDecoderSoftwareFallbackWrapper>(
+ std::move(sw_fallback_decoder), std::move(hw_decoder));
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.h b/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.h
new file mode 100644
index 0000000000..3f44e02b26
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_decoder_software_fallback_wrapper.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_DECODER_SOFTWARE_FALLBACK_WRAPPER_H_
+#define API_VIDEO_CODECS_VIDEO_DECODER_SOFTWARE_FALLBACK_WRAPPER_H_
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Used to wrap external VideoDecoders to provide a fallback to software
+// decoding when a hardware decoder fails to decode a stream due to hardware
+// restrictions, such as max resolution.
+RTC_EXPORT std::unique_ptr<VideoDecoder>
+CreateVideoDecoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoDecoder> sw_fallback_decoder,
+ std::unique_ptr<VideoDecoder> hw_decoder);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_DECODER_SOFTWARE_FALLBACK_WRAPPER_H_
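A sketch of wrapping a platform decoder with the software fallback (CreatePlatformVp8Decoder() is a hypothetical embedder-provided factory; VP8Decoder::Create() is the libvpx decoder referenced elsewhere in this patch):

    #include <memory>

    #include "api/video_codecs/video_decoder_software_fallback_wrapper.h"
    #include "modules/video_coding/codecs/vp8/include/vp8.h"

    // Hypothetical hardware-backed decoder supplied by the embedder.
    std::unique_ptr<webrtc::VideoDecoder> CreatePlatformVp8Decoder();

    std::unique_ptr<webrtc::VideoDecoder> CreateVp8DecoderWithFallback() {
      return webrtc::CreateVideoDecoderSoftwareFallbackWrapper(
          /*sw_fallback_decoder=*/webrtc::VP8Decoder::Create(),
          /*hw_decoder=*/CreatePlatformVp8Decoder());
    }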
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder.cc b/third_party/libwebrtc/api/video_codecs/video_encoder.cc
new file mode 100644
index 0000000000..deb4fdc637
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder.cc
@@ -0,0 +1,345 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_encoder.h"
+
+#include <string.h>
+#include <algorithm>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/strings/string_builder.h"
+
+namespace webrtc {
+
+// TODO(mflodman): Add default complexity for VP8 and VP9.
+VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() {
+ VideoCodecVP8 vp8_settings;
+ memset(&vp8_settings, 0, sizeof(vp8_settings));
+
+ vp8_settings.numberOfTemporalLayers = 1;
+ vp8_settings.denoisingOn = true;
+ vp8_settings.automaticResizeOn = false;
+ vp8_settings.keyFrameInterval = 3000;
+
+ return vp8_settings;
+}
+
+VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() {
+ VideoCodecVP9 vp9_settings;
+ memset(&vp9_settings, 0, sizeof(vp9_settings));
+
+ vp9_settings.numberOfTemporalLayers = 1;
+ vp9_settings.denoisingOn = true;
+ vp9_settings.keyFrameInterval = 3000;
+ vp9_settings.adaptiveQpMode = true;
+ vp9_settings.automaticResizeOn = true;
+ vp9_settings.numberOfSpatialLayers = 1;
+ vp9_settings.flexibleMode = false;
+ vp9_settings.interLayerPred = InterLayerPredMode::kOn;
+
+ return vp9_settings;
+}
+
+VideoCodecH264 VideoEncoder::GetDefaultH264Settings() {
+ VideoCodecH264 h264_settings;
+ memset(&h264_settings, 0, sizeof(h264_settings));
+
+ h264_settings.keyFrameInterval = 3000;
+ h264_settings.numberOfTemporalLayers = 1;
+
+ return h264_settings;
+}
+
+VideoEncoder::ScalingSettings::ScalingSettings() = default;
+
+VideoEncoder::ScalingSettings::ScalingSettings(KOff) : ScalingSettings() {}
+
+VideoEncoder::ScalingSettings::ScalingSettings(int low, int high)
+ : thresholds(QpThresholds(low, high)) {}
+
+VideoEncoder::ScalingSettings::ScalingSettings(int low,
+ int high,
+ int min_pixels)
+ : thresholds(QpThresholds(low, high)), min_pixels_per_frame(min_pixels) {}
+
+VideoEncoder::ScalingSettings::ScalingSettings(const ScalingSettings&) =
+ default;
+
+VideoEncoder::ScalingSettings::~ScalingSettings() {}
+
+// static
+constexpr VideoEncoder::ScalingSettings::KOff
+ VideoEncoder::ScalingSettings::kOff;
+// static
+constexpr uint8_t VideoEncoder::EncoderInfo::kMaxFramerateFraction;
+
+bool VideoEncoder::ResolutionBitrateLimits::operator==(
+ const ResolutionBitrateLimits& rhs) const {
+ return frame_size_pixels == rhs.frame_size_pixels &&
+ min_start_bitrate_bps == rhs.min_start_bitrate_bps &&
+ min_bitrate_bps == rhs.min_bitrate_bps &&
+ max_bitrate_bps == rhs.max_bitrate_bps;
+}
+
+VideoEncoder::EncoderInfo::EncoderInfo()
+ : scaling_settings(VideoEncoder::ScalingSettings::kOff),
+ requested_resolution_alignment(1),
+ apply_alignment_to_all_simulcast_layers(false),
+ supports_native_handle(false),
+ implementation_name("unknown"),
+ has_trusted_rate_controller(false),
+ is_hardware_accelerated(true),
+ fps_allocation{absl::InlinedVector<uint8_t, kMaxTemporalStreams>(
+ 1,
+ kMaxFramerateFraction)},
+ supports_simulcast(false),
+ preferred_pixel_formats{VideoFrameBuffer::Type::kI420} {}
+
+VideoEncoder::EncoderInfo::EncoderInfo(const EncoderInfo&) = default;
+
+VideoEncoder::EncoderInfo::~EncoderInfo() = default;
+
+std::string VideoEncoder::EncoderInfo::ToString() const {
+ char string_buf[2048];
+ rtc::SimpleStringBuilder oss(string_buf);
+
+ oss << "EncoderInfo { "
+ "ScalingSettings { ";
+ if (scaling_settings.thresholds) {
+ oss << "Thresholds { "
+ "low = "
+ << scaling_settings.thresholds->low
+ << ", high = " << scaling_settings.thresholds->high << "}, ";
+ }
+ oss << "min_pixels_per_frame = " << scaling_settings.min_pixels_per_frame
+ << " }";
+ oss << ", requested_resolution_alignment = " << requested_resolution_alignment
+ << ", apply_alignment_to_all_simulcast_layers = "
+ << apply_alignment_to_all_simulcast_layers
+ << ", supports_native_handle = " << supports_native_handle
+ << ", implementation_name = '" << implementation_name
+ << "'"
+ ", has_trusted_rate_controller = "
+ << has_trusted_rate_controller
+ << ", is_hardware_accelerated = " << is_hardware_accelerated
+ << ", fps_allocation = [";
+ size_t num_spatial_layer_with_fps_allocation = 0;
+ for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
+ if (!fps_allocation[i].empty()) {
+ num_spatial_layer_with_fps_allocation = i + 1;
+ }
+ }
+ bool first = true;
+ for (size_t i = 0; i < num_spatial_layer_with_fps_allocation; ++i) {
+ if (fps_allocation[i].empty()) {
+ break;
+ }
+ if (!first) {
+ oss << ", ";
+ }
+ const absl::InlinedVector<uint8_t, kMaxTemporalStreams>& fractions =
+ fps_allocation[i];
+ if (!fractions.empty()) {
+ first = false;
+ oss << "[ ";
+ for (size_t i = 0; i < fractions.size(); ++i) {
+ if (i > 0) {
+ oss << ", ";
+ }
+ oss << (static_cast<double>(fractions[i]) / kMaxFramerateFraction);
+ }
+ oss << "] ";
+ }
+ }
+ oss << "]";
+ oss << ", resolution_bitrate_limits = [";
+ for (size_t i = 0; i < resolution_bitrate_limits.size(); ++i) {
+ if (i > 0) {
+ oss << ", ";
+ }
+ ResolutionBitrateLimits l = resolution_bitrate_limits[i];
+ oss << "Limits { "
+ "frame_size_pixels = "
+ << l.frame_size_pixels
+ << ", min_start_bitrate_bps = " << l.min_start_bitrate_bps
+ << ", min_bitrate_bps = " << l.min_bitrate_bps
+ << ", max_bitrate_bps = " << l.max_bitrate_bps << "} ";
+ }
+ oss << "] "
+ ", supports_simulcast = "
+ << supports_simulcast;
+ oss << ", preferred_pixel_formats = [";
+ for (size_t i = 0; i < preferred_pixel_formats.size(); ++i) {
+ if (i > 0)
+ oss << ", ";
+#if defined(WEBRTC_MOZILLA_BUILD)
+ // This could assert, as opposed to throw using the form in the
+ // else, but since we're in a for loop that uses .size() we can
+ // be fairly sure that this is safe without doing a further
+ // check to make sure 'i' is in-range.
+ oss << VideoFrameBufferTypeToString(preferred_pixel_formats[i]);
+#else
+ oss << VideoFrameBufferTypeToString(preferred_pixel_formats.at(i));
+#endif
+ }
+ oss << "]";
+ if (is_qp_trusted.has_value()) {
+ oss << ", is_qp_trusted = " << is_qp_trusted.value();
+ }
+ oss << "}";
+ return oss.str();
+}
+
+bool VideoEncoder::EncoderInfo::operator==(const EncoderInfo& rhs) const {
+ if (scaling_settings.thresholds.has_value() !=
+ rhs.scaling_settings.thresholds.has_value()) {
+ return false;
+ }
+ if (scaling_settings.thresholds.has_value()) {
+ QpThresholds l = *scaling_settings.thresholds;
+ QpThresholds r = *rhs.scaling_settings.thresholds;
+ if (l.low != r.low || l.high != r.high) {
+ return false;
+ }
+ }
+ if (scaling_settings.min_pixels_per_frame !=
+ rhs.scaling_settings.min_pixels_per_frame) {
+ return false;
+ }
+
+ if (supports_native_handle != rhs.supports_native_handle ||
+ implementation_name != rhs.implementation_name ||
+ has_trusted_rate_controller != rhs.has_trusted_rate_controller ||
+ is_hardware_accelerated != rhs.is_hardware_accelerated) {
+ return false;
+ }
+
+ for (size_t i = 0; i < kMaxSpatialLayers; ++i) {
+ if (fps_allocation[i] != rhs.fps_allocation[i]) {
+ return false;
+ }
+ }
+
+ if (resolution_bitrate_limits != rhs.resolution_bitrate_limits ||
+ supports_simulcast != rhs.supports_simulcast) {
+ return false;
+ }
+
+ return true;
+}
+
+absl::optional<VideoEncoder::ResolutionBitrateLimits>
+VideoEncoder::EncoderInfo::GetEncoderBitrateLimitsForResolution(
+ int frame_size_pixels) const {
+ std::vector<ResolutionBitrateLimits> bitrate_limits =
+ resolution_bitrate_limits;
+
+ // Sort the list of bitrate limits by resolution.
+ sort(bitrate_limits.begin(), bitrate_limits.end(),
+ [](const ResolutionBitrateLimits& lhs,
+ const ResolutionBitrateLimits& rhs) {
+ return lhs.frame_size_pixels < rhs.frame_size_pixels;
+ });
+
+ for (size_t i = 0; i < bitrate_limits.size(); ++i) {
+ RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps, 0);
+ RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps, 0);
+ RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps,
+ bitrate_limits[i].min_bitrate_bps);
+ if (i > 0) {
+ // The bitrate limits aren't expected to decrease with resolution.
+ RTC_DCHECK_GE(bitrate_limits[i].min_bitrate_bps,
+ bitrate_limits[i - 1].min_bitrate_bps);
+ RTC_DCHECK_GE(bitrate_limits[i].min_start_bitrate_bps,
+ bitrate_limits[i - 1].min_start_bitrate_bps);
+ RTC_DCHECK_GE(bitrate_limits[i].max_bitrate_bps,
+ bitrate_limits[i - 1].max_bitrate_bps);
+ }
+
+ if (bitrate_limits[i].frame_size_pixels >= frame_size_pixels) {
+ return absl::optional<ResolutionBitrateLimits>(bitrate_limits[i]);
+ }
+ }
+
+ return absl::nullopt;
+}
+
+VideoEncoder::RateControlParameters::RateControlParameters()
+ : bitrate(VideoBitrateAllocation()),
+ framerate_fps(0.0),
+ bandwidth_allocation(DataRate::Zero()) {}
+
+VideoEncoder::RateControlParameters::RateControlParameters(
+ const VideoBitrateAllocation& bitrate,
+ double framerate_fps)
+ : bitrate(bitrate),
+ framerate_fps(framerate_fps),
+ bandwidth_allocation(DataRate::BitsPerSec(bitrate.get_sum_bps())) {}
+
+VideoEncoder::RateControlParameters::RateControlParameters(
+ const VideoBitrateAllocation& bitrate,
+ double framerate_fps,
+ DataRate bandwidth_allocation)
+ : bitrate(bitrate),
+ framerate_fps(framerate_fps),
+ bandwidth_allocation(bandwidth_allocation) {}
+
+bool VideoEncoder::RateControlParameters::operator==(
+ const VideoEncoder::RateControlParameters& rhs) const {
+ return std::tie(bitrate, framerate_fps, bandwidth_allocation) ==
+ std::tie(rhs.bitrate, rhs.framerate_fps, rhs.bandwidth_allocation);
+}
+
+bool VideoEncoder::RateControlParameters::operator!=(
+ const VideoEncoder::RateControlParameters& rhs) const {
+ return !(rhs == *this);
+}
+
+VideoEncoder::RateControlParameters::~RateControlParameters() = default;
+
+void VideoEncoder::SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) {}
+
+int32_t VideoEncoder::InitEncode(const VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size) {
+ const VideoEncoder::Capabilities capabilities(/* loss_notification= */ false);
+ const VideoEncoder::Settings settings(capabilities, number_of_cores,
+ max_payload_size);
+ // In theory, this and the other version of InitEncode() could end up calling
+ // each other in a loop until we get a stack overflow.
+ // In practice, any subclass of VideoEncoder would overload at least one
+ // of these, and we have a TODO in the header file to make this pure virtual.
+ return InitEncode(codec_settings, settings);
+}
+
+int VideoEncoder::InitEncode(const VideoCodec* codec_settings,
+ const VideoEncoder::Settings& settings) {
+ // In theory, this and the other version of InitEncode() could end up calling
+ // each other in a loop until we get a stack overflow.
+ // In practice, any subclass of VideoEncoder would overload at least one
+ // of these, and we have a TODO in the header file to make this pure virtual.
+ return InitEncode(codec_settings, settings.number_of_cores,
+ settings.max_payload_size);
+}
+
+void VideoEncoder::OnPacketLossRateUpdate(float packet_loss_rate) {}
+
+void VideoEncoder::OnRttUpdate(int64_t rtt_ms) {}
+
+void VideoEncoder::OnLossNotification(
+ const LossNotification& loss_notification) {}
+
+// TODO(webrtc:9722): Remove and make pure virtual.
+VideoEncoder::EncoderInfo VideoEncoder::GetEncoderInfo() const {
+ return EncoderInfo();
+}
+
+} // namespace webrtc
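A small sketch of how GetEncoderBitrateLimitsForResolution() resolves a query: the limits are sorted by resolution and the first entry whose frame_size_pixels is at least the requested size is returned (the function name and values below are illustrative):

    #include "api/video_codecs/video_encoder.h"

    webrtc::VideoEncoder::EncoderInfo MakeExampleEncoderInfo() {
      webrtc::VideoEncoder::EncoderInfo info;
      info.resolution_bitrate_limits = {
          // {frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps,
          //  max_bitrate_bps}
          {640 * 360, 300000, 200000, 1000000},
          {1280 * 720, 800000, 500000, 2500000},
      };
      return info;
    }

    // For a 960x540 frame the 1280x720 entry is returned, since it is the
    // smallest configured resolution not below the requested size; queries
    // above 1280x720 yield absl::nullopt.
    // auto limits =
    //     MakeExampleEncoderInfo().GetEncoderBitrateLimitsForResolution(960 * 540);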
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder.h b/third_party/libwebrtc/api/video_codecs/video_encoder.h
new file mode 100644
index 0000000000..395a87e089
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder.h
@@ -0,0 +1,426 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_H_
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "absl/types/optional.h"
+#include "api/fec_controller_override.h"
+#include "api/units/data_rate.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_codec_constants.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// TODO(pbos): Expose these through a public (root) header or change these APIs.
+struct CodecSpecificInfo;
+
+constexpr int kDefaultMinPixelsPerFrame = 320 * 180;
+
+class RTC_EXPORT EncodedImageCallback {
+ public:
+ virtual ~EncodedImageCallback() {}
+
+ struct Result {
+ enum Error {
+ OK,
+
+ // Failed to send the packet.
+ ERROR_SEND_FAILED,
+ };
+
+ explicit Result(Error error) : error(error) {}
+ Result(Error error, uint32_t frame_id) : error(error), frame_id(frame_id) {}
+
+ Error error;
+
+ // Frame ID assigned to the frame. The frame ID should be the same as the ID
+ // seen by the receiver for this frame. RTP timestamp of the frame is used
+ // as frame ID when RTP is used to send video. Must be used only when
+ // error=OK.
+ uint32_t frame_id = 0;
+
+    // Tells the encoder that the next frame should be dropped.
+ bool drop_next_frame = false;
+ };
+
+  // Used to signal the encoder about the reason a frame is dropped.
+ // kDroppedByMediaOptimizations - dropped by MediaOptimizations (for rate
+ // limiting purposes).
+ // kDroppedByEncoder - dropped by encoder's internal rate limiter.
+ // TODO(bugs.webrtc.org/10164): Delete this enum? It duplicates the more
+ // general VideoStreamEncoderObserver::DropReason. Also,
+ // kDroppedByMediaOptimizations is not produced by any encoder, but by
+ // VideoStreamEncoder.
+ enum class DropReason : uint8_t {
+ kDroppedByMediaOptimizations,
+ kDroppedByEncoder
+ };
+
+ // Callback function which is called when an image has been encoded.
+ virtual Result OnEncodedImage(
+ const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info) = 0;
+
+ virtual void OnDroppedFrame(DropReason reason) {}
+};
+
+class RTC_EXPORT VideoEncoder {
+ public:
+ struct QpThresholds {
+ QpThresholds(int l, int h) : low(l), high(h) {}
+ QpThresholds() : low(-1), high(-1) {}
+ int low;
+ int high;
+ };
+
+ // Quality scaling is enabled if thresholds are provided.
+ struct RTC_EXPORT ScalingSettings {
+ private:
+ // Private magic type for kOff, implicitly convertible to
+ // ScalingSettings.
+ struct KOff {};
+
+ public:
+ // TODO(bugs.webrtc.org/9078): Since absl::optional should be trivially copy
+ // constructible, this magic value can likely be replaced by a constexpr
+ // ScalingSettings value.
+ static constexpr KOff kOff = {};
+
+ ScalingSettings(int low, int high);
+ ScalingSettings(int low, int high, int min_pixels);
+ ScalingSettings(const ScalingSettings&);
+ ScalingSettings(KOff); // NOLINT(runtime/explicit)
+ ~ScalingSettings();
+
+ absl::optional<QpThresholds> thresholds;
+
+ // We will never ask for a resolution lower than this.
+ // TODO(kthelgason): Lower this limit when better testing
+ // on MediaCodec and fallback implementations are in place.
+ // See https://bugs.chromium.org/p/webrtc/issues/detail?id=7206
+ int min_pixels_per_frame = kDefaultMinPixelsPerFrame;
+
+ private:
+ // Private constructor; to get an object without thresholds, use
+ // the magic constant ScalingSettings::kOff.
+ ScalingSettings();
+ };
+
+ // Bitrate limits for resolution.
+ struct ResolutionBitrateLimits {
+ ResolutionBitrateLimits(int frame_size_pixels,
+ int min_start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps)
+ : frame_size_pixels(frame_size_pixels),
+ min_start_bitrate_bps(min_start_bitrate_bps),
+ min_bitrate_bps(min_bitrate_bps),
+ max_bitrate_bps(max_bitrate_bps) {}
+ // Size of video frame, in pixels, the bitrate thresholds are intended for.
+ int frame_size_pixels = 0;
+ // Recommended minimum bitrate to start encoding.
+ int min_start_bitrate_bps = 0;
+ // Recommended minimum bitrate.
+ int min_bitrate_bps = 0;
+ // Recommended maximum bitrate.
+ int max_bitrate_bps = 0;
+
+ bool operator==(const ResolutionBitrateLimits& rhs) const;
+ bool operator!=(const ResolutionBitrateLimits& rhs) const {
+ return !(*this == rhs);
+ }
+ };
+
+ // Struct containing metadata about the encoder implementing this interface.
+ struct RTC_EXPORT EncoderInfo {
+ static constexpr uint8_t kMaxFramerateFraction =
+ std::numeric_limits<uint8_t>::max();
+
+ EncoderInfo();
+ EncoderInfo(const EncoderInfo&);
+
+ ~EncoderInfo();
+
+ std::string ToString() const;
+ bool operator==(const EncoderInfo& rhs) const;
+ bool operator!=(const EncoderInfo& rhs) const { return !(*this == rhs); }
+
+ // Any encoder implementation wishing to use the WebRTC provided
+ // quality scaler must populate this field.
+ ScalingSettings scaling_settings;
+
+ // The width and height of the incoming video frames should be divisible
+ // by `requested_resolution_alignment`. If they are not, the encoder may
+ // drop the incoming frame.
+ // For example: With I420, this value would be a multiple of 2.
+ // Note that this field is unrelated to any horizontal or vertical stride
+ // requirements the encoder has on the incoming video frame buffers.
+ uint32_t requested_resolution_alignment;
+
+    // Same as above, but if true each simulcast layer should also be
+    // divisible by `requested_resolution_alignment`.
+    // Note that the scale factors `scale_resolution_down_by` may be adjusted
+    // so that the common multiple does not get too large, to avoid heavily
+    // cropped frames and aspect ratios far from the original.
+    // Warning: large values of scale_resolution_down_by could be changed
+    // considerably, especially if `requested_resolution_alignment` is large.
+ bool apply_alignment_to_all_simulcast_layers;
+
+ // If true, encoder supports working with a native handle (e.g. texture
+ // handle for hw codecs) rather than requiring a raw I420 buffer.
+ bool supports_native_handle;
+
+ // The name of this particular encoder implementation, e.g. "libvpx".
+ std::string implementation_name;
+
+ // If this field is true, the encoder rate controller must perform
+ // well even in difficult situations, and produce close to the specified
+ // target bitrate seen over a reasonable time window, drop frames if
+ // necessary in order to keep the rate correct, and react quickly to
+    // changing bitrate targets. If this field is true, we disable the
+    // frame dropper in the media optimization module and rely entirely on the
+    // encoder to produce media at a bitrate that closely matches the target.
+    // Any overshooting may result in delay buildup. If this field is
+    // false (default behavior), the media opt frame dropper will drop input
+    // frames if it suspects encoder misbehavior. Misbehavior is common,
+ // especially in hardware codecs. Disable media opt at your own risk.
+ bool has_trusted_rate_controller;
+
+ // If this field is true, the encoder uses hardware support and different
+ // thresholds will be used in CPU adaptation.
+ bool is_hardware_accelerated;
+
+    // For each spatial layer (simulcast stream or SVC layer), represented as
+    // an element in `fps_allocation`, a vector indicates how many temporal
+    // layers the encoder is using for that spatial layer.
+ // For each spatial/temporal layer pair, the frame rate fraction is given as
+ // an 8bit unsigned integer where 0 = 0% and 255 = 100%.
+ //
+ // If the vector is empty for a given spatial layer, it indicates that frame
+ // rates are not defined and we can't count on any specific frame rate to be
+ // generated. Likely this indicates Vp8TemporalLayersType::kBitrateDynamic.
+ //
+ // The encoder may update this on a per-frame basis in response to both
+ // internal and external signals.
+ //
+ // Spatial layers are treated independently, but temporal layers are
+ // cumulative. For instance, if:
+ // fps_allocation[0][0] = kFullFramerate / 2;
+ // fps_allocation[0][1] = kFullFramerate;
+    // Then half of the frames are in the base layer and half are in TL1, but
+ // since TL1 is assumed to depend on the base layer, the frame rate is
+ // indicated as the full 100% for the top layer.
+ //
+ // Defaults to a single spatial layer containing a single temporal layer
+ // with a 100% frame rate fraction.
+ absl::InlinedVector<uint8_t, kMaxTemporalStreams>
+ fps_allocation[kMaxSpatialLayers];
+
+ // Recommended bitrate limits for different resolutions.
+ std::vector<ResolutionBitrateLimits> resolution_bitrate_limits;
+
+ // Obtains the limits from `resolution_bitrate_limits` that best matches the
+ // `frame_size_pixels`.
+ absl::optional<ResolutionBitrateLimits>
+ GetEncoderBitrateLimitsForResolution(int frame_size_pixels) const;
+
+ // If true, this encoder has internal support for generating simulcast
+ // streams. Otherwise, an adapter class will be needed.
+ // Even if true, the config provided to InitEncode() might not be supported,
+ // in such case the encoder should return
+ // WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED.
+ bool supports_simulcast;
+
+ // The list of pixel formats preferred by the encoder. It is assumed that if
+ // the list is empty and supports_native_handle is false, then {I420} is the
+ // preferred pixel format. The order of the formats does not matter.
+ absl::InlinedVector<VideoFrameBuffer::Type, kMaxPreferredPixelFormats>
+ preferred_pixel_formats;
+
+    // Indicates whether or not the QP value the encoder writes into the
+    // frame/slice/tile header can be interpreted as average frame/slice/tile QP.
+ absl::optional<bool> is_qp_trusted;
+ };
+
+ struct RTC_EXPORT RateControlParameters {
+ RateControlParameters();
+ RateControlParameters(const VideoBitrateAllocation& bitrate,
+ double framerate_fps);
+ RateControlParameters(const VideoBitrateAllocation& bitrate,
+ double framerate_fps,
+ DataRate bandwidth_allocation);
+ virtual ~RateControlParameters();
+
+ // Target bitrate, per spatial/temporal layer.
+ // A target bitrate of 0bps indicates a layer should not be encoded at all.
+ VideoBitrateAllocation target_bitrate;
+ // Adjusted target bitrate, per spatial/temporal layer. May be lower or
+ // higher than the target depending on encoder behaviour.
+ VideoBitrateAllocation bitrate;
+ // Target framerate, in fps. A value <= 0.0 is invalid and should be
+ // interpreted as framerate target not available. In this case the encoder
+ // should fall back to the max framerate specified in `codec_settings` of
+ // the last InitEncode() call.
+ double framerate_fps;
+ // The network bandwidth available for video. This is at least
+ // `bitrate.get_sum_bps()`, but may be higher if the application is not
+ // network constrained.
+ DataRate bandwidth_allocation;
+
+ bool operator==(const RateControlParameters& rhs) const;
+ bool operator!=(const RateControlParameters& rhs) const;
+ };
+
+ struct LossNotification {
+ // The timestamp of the last decodable frame *prior* to the last received.
+ // (The last received - described below - might itself be decodable or not.)
+ uint32_t timestamp_of_last_decodable;
+ // The timestamp of the last received frame.
+ uint32_t timestamp_of_last_received;
+ // Describes whether the dependencies of the last received frame were
+ // all decodable.
+ // `false` if some dependencies were undecodable, `true` if all dependencies
+ // were decodable, and `nullopt` if the dependencies are unknown.
+ absl::optional<bool> dependencies_of_last_received_decodable;
+ // Describes whether the received frame was decodable.
+ // `false` if some dependency was undecodable or if some packet belonging
+ // to the last received frame was missed.
+ // `true` if all dependencies were decodable and all packets belonging
+ // to the last received frame were received.
+ // `nullopt` if no packet belonging to the last frame was missed, but the
+ // last packet in the frame was not yet received.
+ absl::optional<bool> last_received_decodable;
+ };
+
+ // Negotiated capabilities which the VideoEncoder may expect the other
+ // side to use.
+ struct Capabilities {
+ explicit Capabilities(bool loss_notification)
+ : loss_notification(loss_notification) {}
+ bool loss_notification;
+ };
+
+ struct Settings {
+ Settings(const Capabilities& capabilities,
+ int number_of_cores,
+ size_t max_payload_size)
+ : capabilities(capabilities),
+ number_of_cores(number_of_cores),
+ max_payload_size(max_payload_size) {}
+
+ Capabilities capabilities;
+ int number_of_cores;
+ size_t max_payload_size;
+ };
+
+ static VideoCodecVP8 GetDefaultVp8Settings();
+ static VideoCodecVP9 GetDefaultVp9Settings();
+ static VideoCodecH264 GetDefaultH264Settings();
+
+ virtual ~VideoEncoder() {}
+
+ // Set a FecControllerOverride, through which the encoder may override
+ // decisions made by FecController.
+ // TODO(bugs.webrtc.org/10769): Update downstream, then make pure-virtual.
+ virtual void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override);
+
+  // Initialize the encoder with the information from `codec_settings`.
+ //
+ // Input:
+ // - codec_settings : Codec settings
+ // - settings : Settings affecting the encoding itself.
+ // Input for deprecated version:
+ // - number_of_cores : Number of cores available for the encoder
+ // - max_payload_size : The maximum size each payload is allowed
+ // to have. Usually MTU - overhead.
+ //
+ // Return value : Set bit rate if OK
+ // <0 - Errors:
+ // WEBRTC_VIDEO_CODEC_ERR_PARAMETER
+ // WEBRTC_VIDEO_CODEC_ERR_SIZE
+ // WEBRTC_VIDEO_CODEC_MEMORY
+ // WEBRTC_VIDEO_CODEC_ERROR
+ // TODO(bugs.webrtc.org/10720): After updating downstream projects and posting
+  // an announcement to discuss-webrtc, remove the three-parameter variant
+  // and make the two-parameter variant pure-virtual.
+ /* ABSL_DEPRECATED("bugs.webrtc.org/10720") */ virtual int32_t InitEncode(
+ const VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size);
+ virtual int InitEncode(const VideoCodec* codec_settings,
+ const VideoEncoder::Settings& settings);
+
+ // Register an encode complete callback object.
+ //
+ // Input:
+ // - callback : Callback object which handles encoded images.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+ virtual int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) = 0;
+
+ // Free encoder memory.
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+ virtual int32_t Release() = 0;
+
+ // Encode an image (as a part of a video stream). The encoded image
+ // will be returned to the user through the encode complete callback.
+ //
+ // Input:
+ // - frame : Image to be encoded
+ // - frame_types : Frame type to be generated by the encoder.
+ //
+ // Return value : WEBRTC_VIDEO_CODEC_OK if OK
+ // <0 - Errors:
+ // WEBRTC_VIDEO_CODEC_ERR_PARAMETER
+ // WEBRTC_VIDEO_CODEC_MEMORY
+ // WEBRTC_VIDEO_CODEC_ERROR
+ virtual int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) = 0;
+
+ // Sets rate control parameters: bitrate, framerate, etc. These settings are
+ // instantaneous (i.e. not moving averages) and should apply from now until
+ // the next call to SetRates().
+ virtual void SetRates(const RateControlParameters& parameters) = 0;
+
+ // Inform the encoder when the packet loss rate changes.
+ //
+ // Input: - packet_loss_rate : The packet loss rate (0.0 to 1.0).
+ virtual void OnPacketLossRateUpdate(float packet_loss_rate);
+
+ // Inform the encoder when the round trip time changes.
+ //
+ // Input: - rtt_ms : The new RTT, in milliseconds.
+ virtual void OnRttUpdate(int64_t rtt_ms);
+
+ // Called when a loss notification is received.
+ virtual void OnLossNotification(const LossNotification& loss_notification);
+
+ // Returns meta-data about the encoder, such as implementation name.
+ // The output of this method may change during runtime. For instance if a
+ // hardware encoder fails, it may fall back to doing software encoding using
+ // an implementation with different characteristics.
+ virtual EncoderInfo GetEncoderInfo() const;
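+
+  // Rough sketch of the expected call order (illustrative only; the exact
+  // sequencing is owned by the calling video stream code):
+  //   encoder->InitEncode(&codec_settings, settings);
+  //   encoder->RegisterEncodeCompleteCallback(&callback);
+  //   encoder->SetRates(rate_parameters);
+  //   encoder->Encode(frame, &frame_types);  // Repeated per captured frame.
+  //   encoder->Release();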
+};
+} // namespace webrtc
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory.h
new file mode 100644
index 0000000000..d28a2a4035
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/units/data_rate.h"
+#include "api/video/render_resolution.h"
+#include "api/video_codecs/sdp_video_format.h"
+
+namespace webrtc {
+
+class VideoEncoder;
+
+// A factory that creates VideoEncoders.
+// NOTE: This class is still under development and may change without notice.
+class VideoEncoderFactory {
+ public:
+ struct CodecSupport {
+ bool is_supported = false;
+ bool is_power_efficient = false;
+ };
+
+ // An injectable class that is continuously updated with encoding conditions
+  // and selects the best encoder given those conditions. An implementation is
+  // typically stateful to avoid toggling between different encoders, which is
+  // costly because objects must be recreated and a new codec always starts
+  // with a key-frame.
+ class EncoderSelectorInterface {
+ public:
+ virtual ~EncoderSelectorInterface() {}
+
+    // Informs the encoder selector about which encoder is currently being
+    // used.
+ virtual void OnCurrentEncoder(const SdpVideoFormat& format) = 0;
+
+    // Called every time the available bitrate is updated. Should return a
+    // non-empty value if an encoder switch should be performed.
+ virtual absl::optional<SdpVideoFormat> OnAvailableBitrate(
+ const DataRate& rate) = 0;
+
+    // Called every time the encoder input resolution changes. Should return a
+    // non-empty value if an encoder switch should be performed.
+ virtual absl::optional<SdpVideoFormat> OnResolutionChange(
+ const RenderResolution& resolution) {
+ return absl::nullopt;
+ }
+
+    // Called if the currently used encoder reports itself as broken. Should
+    // return a non-empty value if an encoder switch should be performed.
+ virtual absl::optional<SdpVideoFormat> OnEncoderBroken() = 0;
+ };
+
+ // Returns a list of supported video formats in order of preference, to use
+ // for signaling etc.
+ virtual std::vector<SdpVideoFormat> GetSupportedFormats() const = 0;
+
+  // Returns a list of supported video formats in order of preference, which
+  // may be tagged with additional information to allow the VideoEncoderFactory
+  // to distinguish between different implementations when CreateVideoEncoder
+  // is called.
+ virtual std::vector<SdpVideoFormat> GetImplementations() const {
+ return GetSupportedFormats();
+ }
+
+  // Queries whether the specified format is supported and whether it will be
+  // power efficient, which is currently interpreted as whether there is
+  // support for hardware acceleration.
+ // See https://w3c.github.io/webrtc-svc/#scalabilitymodes* for a specification
+ // of valid values for `scalability_mode`.
+ // NOTE: QueryCodecSupport is currently an experimental feature that is
+ // subject to change without notice.
+ virtual CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const {
+    // Default implementation: query the supported formats and check whether
+    // the specified format is among them. `is_supported` is left false if a
+    // `scalability_mode` is specified.
+ CodecSupport codec_support;
+ if (!scalability_mode) {
+ codec_support.is_supported = format.IsCodecInList(GetSupportedFormats());
+ }
+ return codec_support;
+ }
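+
+  // Example (illustrative only): a caller might check VP9 support for the
+  // "L1T3" scalability mode as
+  //   CodecSupport support =
+  //       factory->QueryCodecSupport(SdpVideoFormat("VP9"), "L1T3");
+  // and then inspect `support.is_supported` and `support.is_power_efficient`.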
+
+ // Creates a VideoEncoder for the specified format.
+ virtual std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) = 0;
+
+  // This method creates an EncoderSelector to use for a VideoSendStream
+  // (and hence should probably have been called CreateEncoderSelector()).
+  //
+  // Note: This method is unsuitable when several streams are encoded with the
+  // same VideoEncoderFactory (either several streams in one PeerConnection, or
+  // streams in different PeerConnections that share a PeerConnectionFactory).
+  // This is because the method is not given any stream identifier, nor is the
+  // EncoderSelectorInterface, i.e. one cannot tell which stream is being
+  // encoded with the help of the selector.
+  //
+  // In such scenarios, `RtpSenderInterface::SetEncoderSelector` is recommended
+  // instead.
+ //
+ // TODO(bugs.webrtc.org:14122): Deprecate and remove in favor of
+ // `RtpSenderInterface::SetEncoderSelector`.
+ virtual std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const {
+ return nullptr;
+ }
+
+ virtual ~VideoEncoderFactory() {}
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template.h
new file mode 100644
index 0000000000..643096dbbb
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/algorithm/container.h"
+#include "api/array_view.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
+
+namespace webrtc {
+// The VideoEncoderFactoryTemplate supports encoder implementations given as
+// template arguments.
+//
+// To include an encoder in the factory, it must define the following three
+// static member functions:
+//
+// // Returns the supported SdpVideoFormats this encoder can produce.
+// static std::vector<SdpVideoFormat> SupportedFormats();
+//
+// // Creates an encoder instance for the given format.
+// static std::unique_ptr<VideoEncoder>
+// CreateEncoder(const SdpVideoFormat& format);
+//
+// // Returns true if the encoder supports the given scalability mode.
+// static bool
+// IsScalabilityModeSupported(ScalabilityMode scalability_mode);
+//
+// Note that the order of the template arguments matters, as the factory will
+// query/return the first encoder implementation supporting the given
+// SdpVideoFormat.
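+//
+// Usage sketch (illustrative only; the adapter types named here are the ones
+// declared in the video_encoder_factory_template_*_adapter.h headers):
+//   VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter,
+//                               LibvpxVp9EncoderTemplateAdapter,
+//                               OpenH264EncoderTemplateAdapter,
+//                               LibaomAv1EncoderTemplateAdapter>
+//       factory;
+//   std::unique_ptr<VideoEncoder> encoder =
+//       factory.CreateVideoEncoder(SdpVideoFormat("VP8"));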
+template <typename... Ts>
+class VideoEncoderFactoryTemplate : public VideoEncoderFactory {
+ public:
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override {
+ return GetSupportedFormatsInternal<Ts...>();
+ }
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override {
+ return CreateVideoEncoderInternal<Ts...>(format);
+ }
+
+ CodecSupport QueryCodecSupport(
+ const SdpVideoFormat& format,
+ absl::optional<std::string> scalability_mode) const override {
+ return QueryCodecSupportInternal<Ts...>(format, scalability_mode);
+ }
+
+ private:
+ bool IsFormatInList(
+ const SdpVideoFormat& format,
+ rtc::ArrayView<const SdpVideoFormat> supported_formats) const {
+ return absl::c_any_of(
+ supported_formats, [&](const SdpVideoFormat& supported_format) {
+ return supported_format.name == format.name &&
+ supported_format.parameters == format.parameters;
+ });
+ }
+
+ template <typename V>
+ bool IsScalabilityModeSupported(
+ const absl::optional<std::string>& scalability_mode_string) const {
+ if (!scalability_mode_string.has_value()) {
+ return true;
+ }
+ absl::optional<ScalabilityMode> scalability_mode =
+ ScalabilityModeFromString(*scalability_mode_string);
+ return scalability_mode.has_value() &&
+ V::IsScalabilityModeSupported(*scalability_mode);
+ }
+
+ template <typename V, typename... Vs>
+ std::vector<SdpVideoFormat> GetSupportedFormatsInternal() const {
+ auto supported_formats = V::SupportedFormats();
+
+ if constexpr (sizeof...(Vs) > 0) {
+ // Supported formats may overlap between implementations, so duplicates
+ // should be filtered out.
+ for (const auto& other_format : GetSupportedFormatsInternal<Vs...>()) {
+ if (!IsFormatInList(other_format, supported_formats)) {
+ supported_formats.push_back(other_format);
+ }
+ }
+ }
+
+ return supported_formats;
+ }
+
+ template <typename V, typename... Vs>
+ std::unique_ptr<VideoEncoder> CreateVideoEncoderInternal(
+ const SdpVideoFormat& format) {
+ if (IsFormatInList(format, V::SupportedFormats())) {
+ return V::CreateEncoder(format);
+ }
+
+ if constexpr (sizeof...(Vs) > 0) {
+ return CreateVideoEncoderInternal<Vs...>(format);
+ }
+
+ return nullptr;
+ }
+
+ template <typename V, typename... Vs>
+ CodecSupport QueryCodecSupportInternal(
+ const SdpVideoFormat& format,
+ const absl::optional<std::string>& scalability_mode) const {
+ if (IsFormatInList(format, V::SupportedFormats())) {
+ return {.is_supported = IsScalabilityModeSupported<V>(scalability_mode)};
+ }
+
+ if constexpr (sizeof...(Vs) > 0) {
+ return QueryCodecSupportInternal<Vs...>(format, scalability_mode);
+ }
+
+ return {.is_supported = false};
+ }
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h
new file mode 100644
index 0000000000..417df1e192
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "modules/video_coding/codecs/av1/av1_svc_config.h"
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+
+namespace webrtc {
+struct LibaomAv1EncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ absl::InlinedVector<ScalabilityMode, kScalabilityModeCount>
+ scalability_modes = LibaomAv1EncoderSupportedScalabilityModes();
+ return {
+ SdpVideoFormat("AV1", SdpVideoFormat::Parameters(), scalability_modes)};
+ }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+ return CreateLibaomAv1Encoder();
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+ return LibaomAv1EncoderSupportsScalabilityMode(scalability_mode);
+ }
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBAOM_AV1_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h
new file mode 100644
index 0000000000..0f0a9bacd5
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/container/inlined_vector.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp8/vp8_scalability.h"
+
+namespace webrtc {
+struct LibvpxVp8EncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ absl::InlinedVector<ScalabilityMode, kScalabilityModeCount>
+ scalability_modes;
+ for (const auto scalability_mode : kVP8SupportedScalabilityModes) {
+ scalability_modes.push_back(scalability_mode);
+ }
+
+ return {
+ SdpVideoFormat("VP8", SdpVideoFormat::Parameters(), scalability_modes)};
+ }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+ return VP8Encoder::Create();
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+ return VP8SupportsScalabilityMode(scalability_mode);
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP8_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h
new file mode 100644
index 0000000000..c10fda4dc2
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+namespace webrtc {
+struct LibvpxVp9EncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+ return SupportedVP9Codecs(/*add_scalability_modes=*/true);
+ }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+ return VP9Encoder::Create(cricket::VideoCodec(format));
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+ return VP9Encoder::SupportsScalabilityMode(scalability_mode);
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_LIBVPX_VP9_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h
new file mode 100644
index 0000000000..0830460cdb
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
+
+#include <memory>
+#include <vector>
+
+#include "modules/video_coding/codecs/h264/include/h264.h"
+
+namespace webrtc {
+// TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build
+// target remove #ifdefs.
+struct OpenH264EncoderTemplateAdapter {
+ static std::vector<SdpVideoFormat> SupportedFormats() {
+#if defined(WEBRTC_USE_H264)
+ return SupportedH264Codecs(/*add_scalability_modes=*/true);
+#else
+ return {};
+#endif
+ }
+
+ static std::unique_ptr<VideoEncoder> CreateEncoder(
+ const SdpVideoFormat& format) {
+#if defined(WEBRTC_USE_H264)
+ return H264Encoder::Create(cricket::VideoCodec(format));
+#else
+ return nullptr;
+#endif
+ }
+
+ static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) {
+#if defined(WEBRTC_USE_H264)
+ return H264Encoder::SupportsScalabilityMode(scalability_mode);
+#else
+ return false;
+#endif
+ }
+};
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_OPEN_H264_ADAPTER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc
new file mode 100644
index 0000000000..39c52a0081
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc
@@ -0,0 +1,519 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
+
+#include <stdint.h>
+
+#include <cstdio>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/match.h"
+#include "absl/types/optional.h"
+#include "api/fec_controller_override.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_bitrate_allocation.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "media/base/video_common.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/utility/simulcast_utility.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+
+namespace {
+
+// If forced fallback is allowed, either:
+//
+// 1) The forced fallback is requested if the resolution is less than or equal
+// to `max_pixels_`. The resolution is allowed to be scaled down to
+// `min_pixels_`.
+//
+// 2) The forced fallback is requested if temporal support is preferred and the
+// SW fallback supports temporal layers while the HW encoder does not.
+
+struct ForcedFallbackParams {
+ public:
+ bool SupportsResolutionBasedSwitch(const VideoCodec& codec) const {
+ return enable_resolution_based_switch &&
+ codec.codecType == kVideoCodecVP8 &&
+ codec.numberOfSimulcastStreams <= 1 &&
+ codec.width * codec.height <= max_pixels;
+ }
+
+ bool SupportsTemporalBasedSwitch(const VideoCodec& codec) const {
+ return enable_temporal_based_switch &&
+ SimulcastUtility::NumberOfTemporalLayers(codec, 0) != 1;
+ }
+
+ bool enable_temporal_based_switch = false;
+ bool enable_resolution_based_switch = false;
+ int min_pixels = 320 * 180;
+ int max_pixels = 320 * 240;
+};
+
+const char kVp8ForceFallbackEncoderFieldTrial[] =
+ "WebRTC-VP8-Forced-Fallback-Encoder-v2";
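+
+// The field trial string is expected to be of the form
+// "Enabled-<min_pixels>,<max_pixels>,<min_bps>", e.g.
+// "Enabled-57600,153600,30000" (illustrative values only), matching the
+// sscanf() pattern in ParseFallbackParamsFromFieldTrials() below.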
+
+absl::optional<ForcedFallbackParams> ParseFallbackParamsFromFieldTrials(
+ const VideoEncoder& main_encoder) {
+ const std::string field_trial =
+ webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial);
+ if (!absl::StartsWith(field_trial, "Enabled")) {
+ return absl::nullopt;
+ }
+
+ int max_pixels_lower_bound =
+ main_encoder.GetEncoderInfo().scaling_settings.min_pixels_per_frame - 1;
+
+ ForcedFallbackParams params;
+ params.enable_resolution_based_switch = true;
+
+ int min_bps = 0;
+ if (sscanf(field_trial.c_str(), "Enabled-%d,%d,%d", &params.min_pixels,
+ &params.max_pixels, &min_bps) != 3) {
+ RTC_LOG(LS_WARNING)
+ << "Invalid number of forced fallback parameters provided.";
+ return absl::nullopt;
+ } else if (params.min_pixels <= 0 ||
+ params.max_pixels < max_pixels_lower_bound ||
+ params.max_pixels < params.min_pixels || min_bps <= 0) {
+ RTC_LOG(LS_WARNING) << "Invalid forced fallback parameter value provided.";
+ return absl::nullopt;
+ }
+
+ return params;
+}
+
+absl::optional<ForcedFallbackParams> GetForcedFallbackParams(
+ bool prefer_temporal_support,
+ const VideoEncoder& main_encoder) {
+ absl::optional<ForcedFallbackParams> params =
+ ParseFallbackParamsFromFieldTrials(main_encoder);
+ if (prefer_temporal_support) {
+ if (!params.has_value()) {
+ params.emplace();
+ }
+ params->enable_temporal_based_switch = prefer_temporal_support;
+ }
+ return params;
+}
+
+class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder {
+ public:
+ VideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<webrtc::VideoEncoder> sw_encoder,
+ std::unique_ptr<webrtc::VideoEncoder> hw_encoder,
+ bool prefer_temporal_support);
+ ~VideoEncoderSoftwareFallbackWrapper() override;
+
+ void SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) override;
+
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const VideoEncoder::Settings& settings) override;
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ void OnPacketLossRateUpdate(float packet_loss_rate) override;
+
+ void OnRttUpdate(int64_t rtt_ms) override;
+
+ void OnLossNotification(const LossNotification& loss_notification) override;
+
+ void SetRates(const RateControlParameters& parameters) override;
+
+ EncoderInfo GetEncoderInfo() const override;
+
+ private:
+ bool InitFallbackEncoder(bool is_forced);
+ bool TryInitForcedFallbackEncoder();
+ bool IsFallbackActive() const;
+
+ VideoEncoder* current_encoder() {
+ switch (encoder_state_) {
+ case EncoderState::kUninitialized:
+ RTC_LOG(LS_WARNING)
+ << "Trying to access encoder in uninitialized fallback wrapper.";
+ // Return main encoder to preserve previous behavior.
+ [[fallthrough]];
+ case EncoderState::kMainEncoderUsed:
+ return encoder_.get();
+ case EncoderState::kFallbackDueToFailure:
+ case EncoderState::kForcedFallback:
+ return fallback_encoder_.get();
+ }
+ RTC_CHECK_NOTREACHED();
+ }
+
+ // Updates encoder with last observed parameters, such as callbacks, rates,
+ // etc.
+ void PrimeEncoder(VideoEncoder* encoder) const;
+
+ // Settings used in the last InitEncode call and used if a dynamic fallback to
+ // software is required.
+ VideoCodec codec_settings_;
+ absl::optional<VideoEncoder::Settings> encoder_settings_;
+
+ // The last rate control settings, if set.
+ absl::optional<RateControlParameters> rate_control_parameters_;
+
+ // The last channel parameters set.
+ absl::optional<float> packet_loss_;
+ absl::optional<int64_t> rtt_;
+ absl::optional<LossNotification> loss_notification_;
+
+ enum class EncoderState {
+ kUninitialized,
+ kMainEncoderUsed,
+ kFallbackDueToFailure,
+ kForcedFallback
+ };
+
+ EncoderState encoder_state_;
+ const std::unique_ptr<webrtc::VideoEncoder> encoder_;
+ const std::unique_ptr<webrtc::VideoEncoder> fallback_encoder_;
+
+ EncodedImageCallback* callback_;
+
+ const absl::optional<ForcedFallbackParams> fallback_params_;
+ int32_t EncodeWithMainEncoder(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types);
+};
+
+VideoEncoderSoftwareFallbackWrapper::VideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<webrtc::VideoEncoder> sw_encoder,
+ std::unique_ptr<webrtc::VideoEncoder> hw_encoder,
+ bool prefer_temporal_support)
+ : encoder_state_(EncoderState::kUninitialized),
+ encoder_(std::move(hw_encoder)),
+ fallback_encoder_(std::move(sw_encoder)),
+ callback_(nullptr),
+ fallback_params_(
+ GetForcedFallbackParams(prefer_temporal_support, *encoder_)) {
+ RTC_DCHECK(fallback_encoder_);
+}
+
+VideoEncoderSoftwareFallbackWrapper::~VideoEncoderSoftwareFallbackWrapper() =
+ default;
+
+void VideoEncoderSoftwareFallbackWrapper::PrimeEncoder(
+ VideoEncoder* encoder) const {
+ RTC_DCHECK(encoder);
+ // Replay callback, rates, and channel parameters.
+ if (callback_) {
+ encoder->RegisterEncodeCompleteCallback(callback_);
+ }
+ if (rate_control_parameters_) {
+ encoder->SetRates(*rate_control_parameters_);
+ }
+ if (rtt_.has_value()) {
+ encoder->OnRttUpdate(rtt_.value());
+ }
+ if (packet_loss_.has_value()) {
+ encoder->OnPacketLossRateUpdate(packet_loss_.value());
+ }
+
+ if (loss_notification_.has_value()) {
+ encoder->OnLossNotification(loss_notification_.value());
+ }
+}
+
+bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder(bool is_forced) {
+ RTC_LOG(LS_WARNING) << "Encoder falling back to software encoding.";
+
+ RTC_DCHECK(encoder_settings_.has_value());
+ const int ret = fallback_encoder_->InitEncode(&codec_settings_,
+ encoder_settings_.value());
+
+ if (ret != WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize software-encoder fallback.";
+ fallback_encoder_->Release();
+ return false;
+ }
+
+ if (encoder_state_ == EncoderState::kMainEncoderUsed) {
+ // Since we're switching to the fallback encoder, Release the real encoder.
+ // It may be re-initialized via InitEncode later, and it will continue to
+ // get Set calls for rates and channel parameters in the meantime.
+ encoder_->Release();
+ }
+
+ if (is_forced) {
+ encoder_state_ = EncoderState::kForcedFallback;
+ } else {
+ encoder_state_ = EncoderState::kFallbackDueToFailure;
+ }
+
+ return true;
+}
+
+void VideoEncoderSoftwareFallbackWrapper::SetFecControllerOverride(
+ FecControllerOverride* fec_controller_override) {
+  // It is important that only one of the two wrapped encoders ever interacts
+  // with the `fec_controller_override` at a given time. Maintaining that
+  // invariant is the responsibility of `this`.
+
+ encoder_->SetFecControllerOverride(fec_controller_override);
+ fallback_encoder_->SetFecControllerOverride(fec_controller_override);
+}
+
+int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode(
+ const VideoCodec* codec_settings,
+ const VideoEncoder::Settings& settings) {
+ // Store settings, in case we need to dynamically switch to the fallback
+ // encoder after a failed Encode call.
+ codec_settings_ = *codec_settings;
+ encoder_settings_ = settings;
+ // Clear stored rate/channel parameters.
+ rate_control_parameters_ = absl::nullopt;
+
+ RTC_DCHECK_EQ(encoder_state_, EncoderState::kUninitialized)
+ << "InitEncode() should never be called on an active instance!";
+
+ // Try to init forced software codec if it should be used.
+ if (TryInitForcedFallbackEncoder()) {
+ PrimeEncoder(current_encoder());
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ int32_t ret = encoder_->InitEncode(codec_settings, settings);
+ if (ret == WEBRTC_VIDEO_CODEC_OK) {
+ encoder_state_ = EncoderState::kMainEncoderUsed;
+ PrimeEncoder(current_encoder());
+ return ret;
+ }
+
+ // Try to instantiate software codec.
+ if (InitFallbackEncoder(/*is_forced=*/false)) {
+ PrimeEncoder(current_encoder());
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ // Software encoder failed too, use original return code.
+ encoder_state_ = EncoderState::kUninitialized;
+ return ret;
+}
+
+int32_t VideoEncoderSoftwareFallbackWrapper::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ callback_ = callback;
+ return current_encoder()->RegisterEncodeCompleteCallback(callback);
+}
+
+int32_t VideoEncoderSoftwareFallbackWrapper::Release() {
+ if (encoder_state_ == EncoderState::kUninitialized) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ int32_t ret = current_encoder()->Release();
+ encoder_state_ = EncoderState::kUninitialized;
+ return ret;
+}
+
+int32_t VideoEncoderSoftwareFallbackWrapper::Encode(
+ const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) {
+ switch (encoder_state_) {
+ case EncoderState::kUninitialized:
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ case EncoderState::kMainEncoderUsed: {
+ return EncodeWithMainEncoder(frame, frame_types);
+ }
+ case EncoderState::kFallbackDueToFailure:
+ case EncoderState::kForcedFallback:
+ return fallback_encoder_->Encode(frame, frame_types);
+ }
+ RTC_CHECK_NOTREACHED();
+}
+
+int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder(
+ const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) {
+ int32_t ret = encoder_->Encode(frame, frame_types);
+ // If requested, try a software fallback.
+ bool fallback_requested = (ret == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
+ if (fallback_requested && InitFallbackEncoder(/*is_forced=*/false)) {
+ // Start using the fallback with this frame.
+ PrimeEncoder(current_encoder());
+ if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative &&
+ fallback_encoder_->GetEncoderInfo().supports_native_handle) {
+ return fallback_encoder_->Encode(frame, frame_types);
+ } else {
+ RTC_LOG(LS_INFO) << "Fallback encoder does not support native handle - "
+ "converting frame to I420";
+ rtc::scoped_refptr<I420BufferInterface> src_buffer =
+ frame.video_frame_buffer()->ToI420();
+ if (!src_buffer) {
+        RTC_LOG(LS_ERROR) << "Failed to convert frame to I420";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
+ rtc::scoped_refptr<VideoFrameBuffer> dst_buffer =
+ src_buffer->Scale(codec_settings_.width, codec_settings_.height);
+ if (!dst_buffer) {
+ RTC_LOG(LS_ERROR) << "Failed to scale video frame.";
+ return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
+ }
+ VideoFrame scaled_frame = frame;
+ scaled_frame.set_video_frame_buffer(dst_buffer);
+ scaled_frame.set_update_rect(VideoFrame::UpdateRect{
+ 0, 0, scaled_frame.width(), scaled_frame.height()});
+ return fallback_encoder_->Encode(scaled_frame, frame_types);
+ }
+ }
+ // Fallback encoder failed too, return original error code.
+ return ret;
+}
+
+void VideoEncoderSoftwareFallbackWrapper::SetRates(
+ const RateControlParameters& parameters) {
+ rate_control_parameters_ = parameters;
+ return current_encoder()->SetRates(parameters);
+}
+
+void VideoEncoderSoftwareFallbackWrapper::OnPacketLossRateUpdate(
+ float packet_loss_rate) {
+ packet_loss_ = packet_loss_rate;
+ current_encoder()->OnPacketLossRateUpdate(packet_loss_rate);
+}
+
+void VideoEncoderSoftwareFallbackWrapper::OnRttUpdate(int64_t rtt_ms) {
+ rtt_ = rtt_ms;
+ current_encoder()->OnRttUpdate(rtt_ms);
+}
+
+void VideoEncoderSoftwareFallbackWrapper::OnLossNotification(
+ const LossNotification& loss_notification) {
+ loss_notification_ = loss_notification;
+ current_encoder()->OnLossNotification(loss_notification);
+}
+
+VideoEncoder::EncoderInfo VideoEncoderSoftwareFallbackWrapper::GetEncoderInfo()
+ const {
+ EncoderInfo fallback_encoder_info = fallback_encoder_->GetEncoderInfo();
+ EncoderInfo default_encoder_info = encoder_->GetEncoderInfo();
+
+ EncoderInfo info =
+ IsFallbackActive() ? fallback_encoder_info : default_encoder_info;
+
+ info.requested_resolution_alignment = cricket::LeastCommonMultiple(
+ fallback_encoder_info.requested_resolution_alignment,
+ default_encoder_info.requested_resolution_alignment);
+ info.apply_alignment_to_all_simulcast_layers =
+ fallback_encoder_info.apply_alignment_to_all_simulcast_layers ||
+ default_encoder_info.apply_alignment_to_all_simulcast_layers;
+
+ if (fallback_params_.has_value()) {
+ const auto settings = (encoder_state_ == EncoderState::kForcedFallback)
+ ? fallback_encoder_info.scaling_settings
+ : default_encoder_info.scaling_settings;
+ info.scaling_settings =
+ settings.thresholds
+ ? VideoEncoder::ScalingSettings(settings.thresholds->low,
+ settings.thresholds->high,
+ fallback_params_->min_pixels)
+ : VideoEncoder::ScalingSettings::kOff;
+ } else {
+ info.scaling_settings = default_encoder_info.scaling_settings;
+ }
+
+ return info;
+}
+
+bool VideoEncoderSoftwareFallbackWrapper::IsFallbackActive() const {
+ return encoder_state_ == EncoderState::kForcedFallback ||
+ encoder_state_ == EncoderState::kFallbackDueToFailure;
+}
+
+bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() {
+ if (!fallback_params_) {
+ return false;
+ }
+
+ RTC_DCHECK_EQ(encoder_state_, EncoderState::kUninitialized);
+
+ if (fallback_params_->SupportsResolutionBasedSwitch(codec_settings_)) {
+ // Settings valid, try to instantiate software codec.
+ RTC_LOG(LS_INFO) << "Request forced SW encoder fallback: "
+ << codec_settings_.width << "x" << codec_settings_.height;
+ return InitFallbackEncoder(/*is_forced=*/true);
+ }
+
+ if (fallback_params_->SupportsTemporalBasedSwitch(codec_settings_)) {
+ // First init main encoder to see if that supports temporal layers.
+ if (encoder_->InitEncode(&codec_settings_, encoder_settings_.value()) ==
+ WEBRTC_VIDEO_CODEC_OK) {
+ encoder_state_ = EncoderState::kMainEncoderUsed;
+ }
+
+ if (encoder_state_ == EncoderState::kMainEncoderUsed &&
+ encoder_->GetEncoderInfo().fps_allocation[0].size() != 1) {
+ // Primary encoder already supports temporal layers, use that instead.
+ return true;
+ }
+
+ // Try to initialize fallback and check if it supports temporal layers.
+ if (fallback_encoder_->InitEncode(&codec_settings_,
+ encoder_settings_.value()) ==
+ WEBRTC_VIDEO_CODEC_OK) {
+ if (fallback_encoder_->GetEncoderInfo().fps_allocation[0].size() != 1) {
+ // Fallback encoder available and supports temporal layers, use it!
+ if (encoder_state_ == EncoderState::kMainEncoderUsed) {
+ // Main encoder initialized but does not support temporal layers,
+ // release it again.
+ encoder_->Release();
+ }
+ encoder_state_ = EncoderState::kForcedFallback;
+ RTC_LOG(LS_INFO)
+ << "Forced switch to SW encoder due to temporal support.";
+ return true;
+ } else {
+        // Fallback encoder initialization succeeded, but it does not support
+ // temporal layers either - release it.
+ fallback_encoder_->Release();
+ }
+ }
+
+ if (encoder_state_ == EncoderState::kMainEncoderUsed) {
+ // Main encoder already initialized - make use of it.
+ RTC_LOG(LS_INFO)
+          << "Cannot fall back for temporal support since no fallback that "
+             "supports it is available. Using main encoder instead.";
+ return true;
+ }
+ }
+
+ // Neither forced fallback mode supported.
+ return false;
+}
+
+} // namespace
+
+std::unique_ptr<VideoEncoder> CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder> sw_fallback_encoder,
+ std::unique_ptr<VideoEncoder> hw_encoder,
+ bool prefer_temporal_support) {
+ return std::make_unique<VideoEncoderSoftwareFallbackWrapper>(
+ std::move(sw_fallback_encoder), std::move(hw_encoder),
+ prefer_temporal_support);
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.h b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.h
new file mode 100644
index 0000000000..6e6902eb3f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_
+#define API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_
+
+#include <memory>
+#include <utility>
+
+#include "api/video_codecs/video_encoder.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Used to wrap external VideoEncoders to provide a fallback to software
+// encoding when a hardware encoder fails to encode a stream due to hardware
+// restrictions, such as max resolution.
+// |bool prefer_temporal_support| indicates that if the software fallback
+// encoder supports temporal layers but the hardware encoder does not, a
+// fallback should be forced even if the encoder otherwise works.
+RTC_EXPORT std::unique_ptr<VideoEncoder>
+CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder> sw_fallback_encoder,
+ std::unique_ptr<VideoEncoder> hw_encoder,
+ bool prefer_temporal_support);
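+
+// Usage sketch (illustrative only; `hardware_encoder` stands in for an encoder
+// created by a platform-specific factory, and VP8Encoder lives in
+// modules/video_coding, neither of which this header provides):
+//   std::unique_ptr<VideoEncoder> encoder =
+//       CreateVideoEncoderSoftwareFallbackWrapper(
+//           VP8Encoder::Create(),         // Software fallback.
+//           std::move(hardware_encoder),  // Preferred hardware encoder.
+//           /*prefer_temporal_support=*/false);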
+
+// Default fallback for call-sites not yet updated with
+// `prefer_temporal_support`.
+// TODO(sprang): Remove when usage is gone.
+RTC_EXPORT inline std::unique_ptr<VideoEncoder>
+CreateVideoEncoderSoftwareFallbackWrapper(
+ std::unique_ptr<VideoEncoder> sw_fallback_encoder,
+ std::unique_ptr<VideoEncoder> hw_encoder) {
+ return CreateVideoEncoderSoftwareFallbackWrapper(
+ std::move(sw_fallback_encoder), std::move(hw_encoder), false);
+}
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_frame_buffer_controller.h b/third_party/libwebrtc/api/video_codecs/vp8_frame_buffer_controller.h
new file mode 100644
index 0000000000..fc494f7293
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_frame_buffer_controller.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VP8_FRAME_BUFFER_CONTROLLER_H_
+#define API_VIDEO_CODECS_VP8_FRAME_BUFFER_CONTROLLER_H_
+
+#include <array>
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/fec_controller_override.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/vp8_frame_config.h"
+
+namespace webrtc {
+
+// Some notes on the prerequisites of the TemporalLayers interface.
+// * Vp8FrameBufferController is not thread safe, synchronization is the
+// caller's responsibility.
+// * The encoder is assumed to encode all frames in order, and callbacks to
+// PopulateCodecSpecific() / OnEncodeDone() must happen in the same order.
+//
+// This means that in the case of pipelining encoders, it is OK to have a chain
+// of calls such as this:
+// - NextFrameConfig(timestampA)
+// - NextFrameConfig(timestampB)
+// - PopulateCodecSpecific(timestampA, ...)
+// - NextFrameConfig(timestampC)
+// - OnEncodeDone(timestampA, 1234, ...)
+// - NextFrameConfig(timestampD)
+// - OnEncodeDone(timestampB, 0, ...)
+// - OnEncodeDone(timestampC, 1234, ...)
+// Note that NextFrameConfig() for a new frame can happen before
+// OnEncodeDone() for a previous one, but calls themselves must be both
+// synchronized (e.g. run on a task queue) and in order (per type).
+//
+// TODO(eladalon): Revise comment (referring to PopulateCodecSpecific in this
+// context is not very meaningful).
+
+struct CodecSpecificInfo;
+
+// Each member represents an override of the VPX configuration if the optional
+// value is set.
+struct Vp8EncoderConfig {
+ struct TemporalLayerConfig {
+ bool operator!=(const TemporalLayerConfig& other) const {
+ return ts_number_layers != other.ts_number_layers ||
+ ts_target_bitrate != other.ts_target_bitrate ||
+ ts_rate_decimator != other.ts_rate_decimator ||
+ ts_periodicity != other.ts_periodicity ||
+ ts_layer_id != other.ts_layer_id;
+ }
+
+ static constexpr size_t kMaxPeriodicity = 16;
+ static constexpr size_t kMaxLayers = 5;
+
+ // Number of active temporal layers. Set to 0 if not used.
+ uint32_t ts_number_layers;
+
+ // Arrays of length `ts_number_layers`, indicating (cumulative) target
+ // bitrate and rate decimator (e.g. 4 if every 4th frame is in the given
+ // layer) for each active temporal layer, starting with temporal id 0.
+ std::array<uint32_t, kMaxLayers> ts_target_bitrate;
+ std::array<uint32_t, kMaxLayers> ts_rate_decimator;
+
+ // The periodicity of the temporal pattern. Set to 0 if not used.
+ uint32_t ts_periodicity;
+
+    // Array of length `ts_periodicity` indicating the sequence of temporal
+    // ids to assign to incoming frames.
+ std::array<uint32_t, kMaxPeriodicity> ts_layer_id;
+ };
+
+ absl::optional<TemporalLayerConfig> temporal_layer_config;
+
+ // Target bitrate, in bps.
+ absl::optional<uint32_t> rc_target_bitrate;
+
+ // Clamp QP to max. Use 0 to disable clamping.
+ absl::optional<uint32_t> rc_max_quantizer;
+
+ // Error resilience mode.
+ absl::optional<uint32_t> g_error_resilient;
+
+ // If set to true, all previous configuration overrides should be reset.
+ bool reset_previous_configuration_overrides = false;
+};
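+
+// Illustrative sketch (not part of the API): an UpdateConfiguration()
+// implementation that only wants to change the target bitrate might return
+//   Vp8EncoderConfig config;
+//   config.rc_target_bitrate = 500000;  // bps; placeholder value.
+//   return config;
+// which leaves all other previously applied overrides in effect.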
+
+// This interface defines a way of delegating the logic of buffer management.
+// Multiple streams may be controlled by a single controller, demuxing between
+// them using stream_index.
+class Vp8FrameBufferController {
+ public:
+ virtual ~Vp8FrameBufferController() = default;
+
+ // Set limits on QP.
+ // The limits are suggestion-only; the controller is allowed to exceed them.
+ virtual void SetQpLimits(size_t stream_index, int min_qp, int max_qp) = 0;
+
+  // Number of streams controlled by `this`.
+ virtual size_t StreamCount() const = 0;
+
+  // If this method returns true, the encoder is free to drop frames, for
+  // instance in an effort to uphold the encoding bitrate.
+  // If it returns false, the encoder must not drop any frames unless:
+ // 1. Requested to do so via Vp8FrameConfig.drop_frame
+ // 2. The frame to be encoded is requested to be a keyframe
+ // 3. The encoder detected a large overshoot and decided to drop and then
+ // re-encode the image at a low bitrate. In this case the encoder should
+ // call OnFrameDropped() once to indicate drop, and then call
+ // OnEncodeDone() again when the frame has actually been encoded.
+ virtual bool SupportsEncoderFrameDropping(size_t stream_index) const = 0;
+
+ // New target bitrate for a stream (each entry in
+ // `bitrates_bps` is for another temporal layer).
+ virtual void OnRatesUpdated(size_t stream_index,
+ const std::vector<uint32_t>& bitrates_bps,
+ int framerate_fps) = 0;
+
+ // Called by the encoder before encoding a frame. Returns a set of overrides
+ // the controller wishes to enact in the encoder's configuration.
+ // If a value is not overridden, previous overrides are still in effect.
+ // However, if `Vp8EncoderConfig::reset_previous_configuration_overrides`
+ // is set to `true`, all previous overrides are reset.
+ virtual Vp8EncoderConfig UpdateConfiguration(size_t stream_index) = 0;
+
+  // Returns the recommended VP8 encode flags for the next frame.
+ // The timestamp may be used as both a time and a unique identifier, and so
+ // the caller must make sure no two frames use the same timestamp.
+ // The timestamp uses a 90kHz RTP clock.
+ // After calling this method, first call the actual encoder with the provided
+ // frame configuration, and then OnEncodeDone() below.
+ virtual Vp8FrameConfig NextFrameConfig(size_t stream_index,
+ uint32_t rtp_timestamp) = 0;
+
+ // Called after the encode step is done. `rtp_timestamp` must match the
+ // parameter use in the NextFrameConfig() call.
+ // `is_keyframe` must be true iff the encoder decided to encode this frame as
+ // a keyframe.
+ // If `info` is not null, the encoder may update `info` with codec specific
+ // data such as temporal id. `qp` should indicate the frame-level QP this
+ // frame was encoded at. If the encoder does not support extracting this, `qp`
+ // should be set to 0.
+ virtual void OnEncodeDone(size_t stream_index,
+ uint32_t rtp_timestamp,
+ size_t size_bytes,
+ bool is_keyframe,
+ int qp,
+ CodecSpecificInfo* info) = 0;
+
+ // Called when a frame is dropped by the encoder.
+ virtual void OnFrameDropped(size_t stream_index, uint32_t rtp_timestamp) = 0;
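+
+  // Per-frame flow sketch (illustrative only; exact sequencing is owned by the
+  // encoder implementation):
+  //   Vp8FrameConfig config = controller->NextFrameConfig(0, rtp_timestamp);
+  //   // ...encode the frame according to `config`...
+  //   controller->OnEncodeDone(0, rtp_timestamp, size_bytes, is_keyframe, qp,
+  //                            &codec_specific_info);
+  //   // Or, if the frame was dropped instead:
+  //   controller->OnFrameDropped(0, rtp_timestamp);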
+
+ // Called by the encoder when the packet loss rate changes.
+ // `packet_loss_rate` runs between 0.0 (no loss) and 1.0 (everything lost).
+ virtual void OnPacketLossRateUpdate(float packet_loss_rate) = 0;
+
+ // Called by the encoder when the round trip time changes.
+ virtual void OnRttUpdate(int64_t rtt_ms) = 0;
+
+ // Called when a loss notification is received.
+ virtual void OnLossNotification(
+ const VideoEncoder::LossNotification& loss_notification) = 0;
+};
+
+// Interface for a factory of Vp8FrameBufferController instances.
+class Vp8FrameBufferControllerFactory {
+ public:
+ virtual ~Vp8FrameBufferControllerFactory() = default;
+
+ // Clones oneself. (Avoids Vp8FrameBufferControllerFactoryFactory.)
+ virtual std::unique_ptr<Vp8FrameBufferControllerFactory> Clone() const = 0;
+
+ // Create a Vp8FrameBufferController instance.
+ virtual std::unique_ptr<Vp8FrameBufferController> Create(
+ const VideoCodec& codec,
+ const VideoEncoder::Settings& settings,
+ FecControllerOverride* fec_controller_override) = 0;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VP8_FRAME_BUFFER_CONTROLLER_H_
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_frame_config.cc b/third_party/libwebrtc/api/video_codecs/vp8_frame_config.cc
new file mode 100644
index 0000000000..05e1911bb7
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_frame_config.cc
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/vp8_frame_config.h"
+
+#include "modules/video_coding/codecs/interface/common_constants.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+Vp8FrameConfig::Vp8FrameConfig() : Vp8FrameConfig(kNone, kNone, kNone, false) {}
+
+Vp8FrameConfig::Vp8FrameConfig(BufferFlags last,
+ BufferFlags golden,
+ BufferFlags arf)
+ : Vp8FrameConfig(last, golden, arf, false) {}
+
+Vp8FrameConfig::Vp8FrameConfig(BufferFlags last,
+ BufferFlags golden,
+ BufferFlags arf,
+ FreezeEntropy)
+ : Vp8FrameConfig(last, golden, arf, true) {}
+
+Vp8FrameConfig::Vp8FrameConfig(BufferFlags last,
+ BufferFlags golden,
+ BufferFlags arf,
+ bool freeze_entropy)
+ : drop_frame(last == BufferFlags::kNone && golden == BufferFlags::kNone &&
+ arf == BufferFlags::kNone),
+ last_buffer_flags(last),
+ golden_buffer_flags(golden),
+ arf_buffer_flags(arf),
+ encoder_layer_id(0),
+ packetizer_temporal_idx(kNoTemporalIdx),
+ layer_sync(false),
+ freeze_entropy(freeze_entropy),
+ first_reference(Vp8BufferReference::kNone),
+ second_reference(Vp8BufferReference::kNone),
+ retransmission_allowed(true) {}
+
+bool Vp8FrameConfig::References(Buffer buffer) const {
+ switch (buffer) {
+ case Buffer::kLast:
+ return (last_buffer_flags & kReference) != 0;
+ case Buffer::kGolden:
+ return (golden_buffer_flags & kReference) != 0;
+ case Buffer::kArf:
+ return (arf_buffer_flags & kReference) != 0;
+ case Buffer::kCount:
+ break;
+ }
+ RTC_DCHECK_NOTREACHED();
+ return false;
+}
+
+bool Vp8FrameConfig::Updates(Buffer buffer) const {
+ switch (buffer) {
+ case Buffer::kLast:
+ return (last_buffer_flags & kUpdate) != 0;
+ case Buffer::kGolden:
+ return (golden_buffer_flags & kUpdate) != 0;
+ case Buffer::kArf:
+ return (arf_buffer_flags & kUpdate) != 0;
+ case Buffer::kCount:
+ break;
+ }
+ RTC_DCHECK_NOTREACHED();
+ return false;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_frame_config.h b/third_party/libwebrtc/api/video_codecs/vp8_frame_config.h
new file mode 100644
index 0000000000..5369bf58bc
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_frame_config.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VP8_FRAME_CONFIG_H_
+#define API_VIDEO_CODECS_VP8_FRAME_CONFIG_H_
+
+#include <stdint.h>
+
+namespace webrtc {
+
+// Configuration of a VP8 frame - which buffers are to be referenced
+// by it, which buffers should be updated, etc.
+struct Vp8FrameConfig {
+ static Vp8FrameConfig GetIntraFrameConfig() {
+ Vp8FrameConfig frame_config = Vp8FrameConfig(
+ BufferFlags::kUpdate, BufferFlags::kUpdate, BufferFlags::kUpdate);
+ frame_config.packetizer_temporal_idx = 0;
+ return frame_config;
+ }
+
+ enum BufferFlags : int {
+ kNone = 0,
+ kReference = 1,
+ kUpdate = 2,
+ kReferenceAndUpdate = kReference | kUpdate,
+ };
+
+ enum FreezeEntropy { kFreezeEntropy };
+
+  // Defines bit-maskable references to the three buffers available in VP8.
+ enum class Vp8BufferReference : uint8_t {
+ kNone = 0,
+ kLast = 1,
+ kGolden = 2,
+ kAltref = 4
+ };
+
+ Vp8FrameConfig();
+
+ Vp8FrameConfig(BufferFlags last, BufferFlags golden, BufferFlags arf);
+ Vp8FrameConfig(BufferFlags last,
+ BufferFlags golden,
+ BufferFlags arf,
+ FreezeEntropy);
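+
+  // Illustrative sketch: a delta frame that references and refreshes the last
+  // buffer, refreshes the golden buffer, and leaves the altref buffer
+  // untouched could be described as
+  //   Vp8FrameConfig(BufferFlags::kReferenceAndUpdate,  // last
+  //                  BufferFlags::kUpdate,              // golden
+  //                  BufferFlags::kNone);               // arf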
+
+ enum class Buffer : int { kLast = 0, kGolden = 1, kArf = 2, kCount };
+
+ bool References(Buffer buffer) const;
+
+ bool Updates(Buffer buffer) const;
+
+ bool IntraFrame() const {
+ // Intra frames do not reference any buffers, and update all buffers.
+ return last_buffer_flags == kUpdate && golden_buffer_flags == kUpdate &&
+ arf_buffer_flags == kUpdate;
+ }
+
+ bool drop_frame;
+ BufferFlags last_buffer_flags;
+ BufferFlags golden_buffer_flags;
+ BufferFlags arf_buffer_flags;
+
+ // The encoder layer ID is used to utilize the correct bitrate allocator
+ // inside the encoder. It does not control references nor determine which
+ // "actual" temporal layer this is. The packetizer temporal index determines
+ // which layer the encoded frame should be packetized into.
+  // Normally these are the same, but current temporal-layer strategies for
+  // screenshare use one bitrate allocator for all layers, while attempting to
+  // packetize / utilize references to split a stream into multiple layers
+  // with different quantizer settings, to hit the target bitrate.
+ // TODO(sprang): Screenshare layers are being reconsidered at the time of
+ // writing, we might be able to remove this distinction, and have a temporal
+ // layer imply both (the normal case).
+ int encoder_layer_id;
+ // TODO(eladalon/sprang): Move out of this class.
+ int packetizer_temporal_idx;
+
+ // TODO(eladalon/sprang): Move out of this class.
+ bool layer_sync;
+
+ bool freeze_entropy;
+
+ // Indicates in which order the encoder should search the reference buffers
+ // when doing motion prediction. Set to kNone to use unspecified order. Any
+ // buffer indicated here must not have the corresponding no_ref bit set.
+  // If all three buffers can be referenced, the one not listed here should be
+ // searched last.
+ Vp8BufferReference first_reference;
+ Vp8BufferReference second_reference;
+
+ // Whether this frame is eligible for retransmission.
+ bool retransmission_allowed;
+
+ private:
+ Vp8FrameConfig(BufferFlags last,
+ BufferFlags golden,
+ BufferFlags arf,
+ bool freeze_entropy);
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VP8_FRAME_CONFIG_H_
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.cc b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.cc
new file mode 100644
index 0000000000..dd75c616d8
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.cc
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/vp8_temporal_layers.h"
+
+#include <utility>
+
+#include "absl/algorithm/container.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+Vp8TemporalLayers::Vp8TemporalLayers(
+ std::vector<std::unique_ptr<Vp8FrameBufferController>>&& controllers,
+ FecControllerOverride* fec_controller_override)
+ : controllers_(std::move(controllers)) {
+ RTC_DCHECK(!controllers_.empty());
+ RTC_DCHECK(absl::c_none_of(
+ controllers_,
+ [](const std::unique_ptr<Vp8FrameBufferController>& controller) {
+ return controller.get() == nullptr;
+ }));
+ if (fec_controller_override) {
+ fec_controller_override->SetFecAllowed(true);
+ }
+}
+
+void Vp8TemporalLayers::SetQpLimits(size_t stream_index,
+ int min_qp,
+ int max_qp) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->SetQpLimits(0, min_qp, max_qp);
+}
+
+size_t Vp8TemporalLayers::StreamCount() const {
+ return controllers_.size();
+}
+
+bool Vp8TemporalLayers::SupportsEncoderFrameDropping(
+ size_t stream_index) const {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->SupportsEncoderFrameDropping(0);
+}
+
+void Vp8TemporalLayers::OnRatesUpdated(
+ size_t stream_index,
+ const std::vector<uint32_t>& bitrates_bps,
+ int framerate_fps) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->OnRatesUpdated(0, bitrates_bps,
+ framerate_fps);
+}
+
+Vp8EncoderConfig Vp8TemporalLayers::UpdateConfiguration(size_t stream_index) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->UpdateConfiguration(0);
+}
+
+Vp8FrameConfig Vp8TemporalLayers::NextFrameConfig(size_t stream_index,
+ uint32_t rtp_timestamp) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->NextFrameConfig(0, rtp_timestamp);
+}
+
+void Vp8TemporalLayers::OnEncodeDone(size_t stream_index,
+ uint32_t rtp_timestamp,
+ size_t size_bytes,
+ bool is_keyframe,
+ int qp,
+ CodecSpecificInfo* info) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ return controllers_[stream_index]->OnEncodeDone(0, rtp_timestamp, size_bytes,
+ is_keyframe, qp, info);
+}
+
+void Vp8TemporalLayers::OnFrameDropped(size_t stream_index,
+ uint32_t rtp_timestamp) {
+ RTC_DCHECK_LT(stream_index, controllers_.size());
+ controllers_[stream_index]->OnFrameDropped(0, rtp_timestamp);
+}
+
+void Vp8TemporalLayers::OnPacketLossRateUpdate(float packet_loss_rate) {
+ for (auto& controller : controllers_) {
+ controller->OnPacketLossRateUpdate(packet_loss_rate);
+ }
+}
+
+void Vp8TemporalLayers::OnRttUpdate(int64_t rtt_ms) {
+ for (auto& controller : controllers_) {
+ controller->OnRttUpdate(rtt_ms);
+ }
+}
+
+void Vp8TemporalLayers::OnLossNotification(
+ const VideoEncoder::LossNotification& loss_notification) {
+ for (auto& controller : controllers_) {
+ controller->OnLossNotification(loss_notification);
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.h b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.h
new file mode 100644
index 0000000000..2ffe6eacdf
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_H_
+#define API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_H_
+
+#include <memory>
+#include <vector>
+
+#include "api/fec_controller_override.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/vp8_frame_buffer_controller.h"
+#include "api/video_codecs/vp8_frame_config.h"
+
+namespace webrtc {
+
+// Two different flavors of temporal layers are currently available:
+// kFixedPattern uses a fixed repeating pattern of 1-4 layers.
+// kBitrateDynamic can allocate frames dynamically to 1 or 2 layers, based on
+// the bitrate produced.
+// TODO(eladalon): Remove this enum.
+enum class Vp8TemporalLayersType { kFixedPattern, kBitrateDynamic };
+
+// This class provides the encoder settings needed to realize a temporal layer
+// structure, delegating each call to the Vp8FrameBufferController for the
+// addressed simulcast stream.
+class Vp8TemporalLayers final : public Vp8FrameBufferController {
+ public:
+ Vp8TemporalLayers(
+ std::vector<std::unique_ptr<Vp8FrameBufferController>>&& controllers,
+ FecControllerOverride* fec_controller_override);
+ ~Vp8TemporalLayers() override = default;
+
+ void SetQpLimits(size_t stream_index, int min_qp, int max_qp) override;
+
+ size_t StreamCount() const override;
+
+ bool SupportsEncoderFrameDropping(size_t stream_index) const override;
+
+ void OnRatesUpdated(size_t stream_index,
+ const std::vector<uint32_t>& bitrates_bps,
+ int framerate_fps) override;
+
+ Vp8EncoderConfig UpdateConfiguration(size_t stream_index) override;
+
+ Vp8FrameConfig NextFrameConfig(size_t stream_index,
+ uint32_t rtp_timestamp) override;
+
+ void OnEncodeDone(size_t stream_index,
+ uint32_t rtp_timestamp,
+ size_t size_bytes,
+ bool is_keyframe,
+ int qp,
+ CodecSpecificInfo* info) override;
+
+ void OnFrameDropped(size_t stream_index, uint32_t rtp_timestamp) override;
+
+ void OnPacketLossRateUpdate(float packet_loss_rate) override;
+
+ void OnRttUpdate(int64_t rtt_ms) override;
+
+ void OnLossNotification(
+ const VideoEncoder::LossNotification& loss_notification) override;
+
+ private:
+ std::vector<std::unique_ptr<Vp8FrameBufferController>> controllers_;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_H_
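
Since Vp8TemporalLayers simply forwards each call to the controller for the addressed stream, an encoder drives it like any other Vp8FrameBufferController: request a frame config, encode, then report the outcome. The sketch below shows that loop for a single stream; EncodeWithConfig() is a hypothetical stand-in for the real libvpx encode path, and the caller supplies the CodecSpecificInfo to fill in.

#include <cstddef>
#include <cstdint>

#include "api/video_codecs/vp8_temporal_layers.h"

namespace {

// Hypothetical encoder hook: returns the encoded size in bytes, or 0 if the
// encoder dropped the frame. A real integration would call into libvpx here.
size_t EncodeWithConfig(const webrtc::Vp8FrameConfig& /*config*/,
                        uint32_t /*rtp_timestamp*/,
                        bool* is_keyframe,
                        int* qp) {
  *is_keyframe = false;
  *qp = 30;
  return 1200;  // Pretend we produced a 1200-byte delta frame.
}

void EncodeOneFrame(webrtc::Vp8FrameBufferController& controller,
                    uint32_t rtp_timestamp,
                    webrtc::CodecSpecificInfo* codec_specific) {
  // Single stream in this sketch, so the stream index is always 0.
  const webrtc::Vp8FrameConfig config =
      controller.NextFrameConfig(0, rtp_timestamp);

  bool is_keyframe = false;
  int qp = -1;
  const size_t size_bytes =
      EncodeWithConfig(config, rtp_timestamp, &is_keyframe, &qp);

  if (size_bytes > 0) {
    controller.OnEncodeDone(0, rtp_timestamp, size_bytes, is_keyframe, qp,
                            codec_specific);
  } else {
    controller.OnFrameDropped(0, rtp_timestamp);
  }
}

}  // namespace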
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.cc b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.cc
new file mode 100644
index 0000000000..193494d71d
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/vp8_temporal_layers_factory.h"
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/fec_controller_override.h"
+#include "modules/video_coding/codecs/vp8/default_temporal_layers.h"
+#include "modules/video_coding/codecs/vp8/screenshare_layers.h"
+#include "modules/video_coding/utility/simulcast_utility.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+std::unique_ptr<Vp8FrameBufferController> Vp8TemporalLayersFactory::Create(
+ const VideoCodec& codec,
+ const VideoEncoder::Settings& settings,
+ FecControllerOverride* fec_controller_override) {
+ std::vector<std::unique_ptr<Vp8FrameBufferController>> controllers;
+ const int num_streams = SimulcastUtility::NumberOfSimulcastStreams(codec);
+ RTC_DCHECK_GE(num_streams, 1);
+ controllers.reserve(num_streams);
+
+ for (int i = 0; i < num_streams; ++i) {
+ int num_temporal_layers =
+ SimulcastUtility::NumberOfTemporalLayers(codec, i);
+ RTC_DCHECK_GE(num_temporal_layers, 1);
+ if (SimulcastUtility::IsConferenceModeScreenshare(codec) && i == 0) {
+ // The legacy screenshare controller requires at least 2 temporal layers.
+ num_temporal_layers = std::max(2, num_temporal_layers);
+ controllers.push_back(
+ std::make_unique<ScreenshareLayers>(num_temporal_layers));
+ } else {
+ controllers.push_back(
+ std::make_unique<DefaultTemporalLayers>(num_temporal_layers));
+ }
+ }
+
+ return std::make_unique<Vp8TemporalLayers>(std::move(controllers),
+ fec_controller_override);
+}
+
+std::unique_ptr<Vp8FrameBufferControllerFactory>
+Vp8TemporalLayersFactory::Clone() const {
+ return std::make_unique<Vp8TemporalLayersFactory>();
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.h b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.h
new file mode 100644
index 0000000000..7a146f1d4f
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_FACTORY_H_
+#define API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_FACTORY_H_
+
+#include <memory>
+
+#include "api/video_codecs/vp8_temporal_layers.h"
+
+namespace webrtc {
+
+class Vp8TemporalLayersFactory : public Vp8FrameBufferControllerFactory {
+ public:
+ ~Vp8TemporalLayersFactory() override = default;
+
+ std::unique_ptr<Vp8FrameBufferControllerFactory> Clone() const override;
+
+ std::unique_ptr<Vp8FrameBufferController> Create(
+ const VideoCodec& codec,
+ const VideoEncoder::Settings& settings,
+ FecControllerOverride* fec_controller_override) override;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_FACTORY_H_
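
For reference, obtaining a controller from this factory typically looks like the sketch below. The VideoCodec field values and the VideoEncoder::Settings arguments are illustrative assumptions, not prescriptive, and MakeController() is a hypothetical helper.

#include <memory>

#include "api/fec_controller_override.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers_factory.h"

std::unique_ptr<webrtc::Vp8FrameBufferController> MakeController(
    webrtc::FecControllerOverride* fec_override) {
  // Minimal single-stream VP8 configuration with three temporal layers.
  webrtc::VideoCodec codec;
  codec.codecType = webrtc::kVideoCodecVP8;
  codec.width = 1280;
  codec.height = 720;
  codec.maxFramerate = 30;
  codec.numberOfSimulcastStreams = 1;
  codec.simulcastStream[0].numberOfTemporalLayers = 3;
  codec.VP8()->numberOfTemporalLayers = 3;

  webrtc::VideoEncoder::Capabilities capabilities(/*loss_notification=*/false);
  webrtc::VideoEncoder::Settings settings(capabilities, /*number_of_cores=*/1,
                                          /*max_payload_size=*/1200);

  webrtc::Vp8TemporalLayersFactory factory;
  // One controller per simulcast stream is created; fec_override may be null.
  return factory.Create(codec, settings, fec_override);
}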
diff --git a/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory_gn/moz.build b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory_gn/moz.build
new file mode 100644
index 0000000000..2769bef0eb
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory_gn/moz.build
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+UNIFIED_SOURCES += [
+ "/third_party/libwebrtc/api/video_codecs/vp8_temporal_layers_factory.cc"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "log"
+ ]
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+ OS_LIBS += [
+ "rt"
+ ]
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+ OS_LIBS += [
+ "crypt32",
+ "iphlpapi",
+ "secur32",
+ "winmm"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ CXXFLAGS += [
+ "-mfpu=neon"
+ ]
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ CXXFLAGS += [
+ "-msse2"
+ ]
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("vp8_temporal_layers_factory_gn")
diff --git a/third_party/libwebrtc/api/video_codecs/vp9_profile.cc b/third_party/libwebrtc/api/video_codecs/vp9_profile.cc
new file mode 100644
index 0000000000..7e627cc080
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp9_profile.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/vp9_profile.h"
+
+#include <map>
+#include <utility>
+
+#include "rtc_base/string_to_number.h"
+
+namespace webrtc {
+
+// Profile information for VP9 video.
+const char kVP9FmtpProfileId[] = "profile-id";
+
+std::string VP9ProfileToString(VP9Profile profile) {
+ switch (profile) {
+ case VP9Profile::kProfile0:
+ return "0";
+ case VP9Profile::kProfile1:
+ return "1";
+ case VP9Profile::kProfile2:
+ return "2";
+ case VP9Profile::kProfile3:
+ return "3";
+ }
+ return "0";
+}
+
+absl::optional<VP9Profile> StringToVP9Profile(const std::string& str) {
+ const absl::optional<int> i = rtc::StringToNumber<int>(str);
+ if (!i.has_value())
+ return absl::nullopt;
+
+ switch (i.value()) {
+ case 0:
+ return VP9Profile::kProfile0;
+ case 1:
+ return VP9Profile::kProfile1;
+ case 2:
+ return VP9Profile::kProfile2;
+ case 3:
+ return VP9Profile::kProfile3;
+ default:
+ return absl::nullopt;
+ }
+}
+
+absl::optional<VP9Profile> ParseSdpForVP9Profile(
+ const SdpVideoFormat::Parameters& params) {
+ const auto profile_it = params.find(kVP9FmtpProfileId);
+ if (profile_it == params.end())
+ return VP9Profile::kProfile0;
+ const std::string& profile_str = profile_it->second;
+ return StringToVP9Profile(profile_str);
+}
+
+bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2) {
+ const absl::optional<VP9Profile> profile = ParseSdpForVP9Profile(params1);
+ const absl::optional<VP9Profile> other_profile =
+ ParseSdpForVP9Profile(params2);
+ return profile && other_profile && profile == other_profile;
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/api/video_codecs/vp9_profile.h b/third_party/libwebrtc/api/video_codecs/vp9_profile.h
new file mode 100644
index 0000000000..b570bc3bb6
--- /dev/null
+++ b/third_party/libwebrtc/api/video_codecs/vp9_profile.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_CODECS_VP9_PROFILE_H_
+#define API_VIDEO_CODECS_VP9_PROFILE_H_
+
+#include <string>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Profile information for VP9 video.
+extern RTC_EXPORT const char kVP9FmtpProfileId[];
+
+enum class VP9Profile {
+ kProfile0,
+ kProfile1,
+ kProfile2,
+ kProfile3,
+};
+
+// Helper function to convert a VP9Profile to std::string. Returns "0" by
+// default.
+RTC_EXPORT std::string VP9ProfileToString(VP9Profile profile);
+
+// Helper function to convert a std::string to VP9Profile. Returns nullopt if
+// given an invalid profile string.
+absl::optional<VP9Profile> StringToVP9Profile(const std::string& str);
+
+// Parses a profile represented as a single-digit string in an SDP key-value
+// map. The default profile (kProfile0) is returned if the profile key is
+// missing. Nothing is returned if the key is present but the string is
+// invalid.
+RTC_EXPORT absl::optional<VP9Profile> ParseSdpForVP9Profile(
+ const SdpVideoFormat::Parameters& params);
+
+// Returns true if the parameters have the same VP9 profile, or if neither
+// contains a VP9 profile.
+bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1,
+ const SdpVideoFormat::Parameters& params2);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_CODECS_VP9_PROFILE_H_
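
A short sketch of how these helpers combine when negotiating VP9 over SDP; the OfferMatchesProfile2() wrapper is hypothetical.

#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/vp9_profile.h"

bool OfferMatchesProfile2(const webrtc::SdpVideoFormat& offered_format) {
  // Local VP9 parameters advertising profile 2.
  webrtc::SdpVideoFormat::Parameters local_params = {
      {webrtc::kVP9FmtpProfileId,
       webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}};

  // A missing profile-id on either side is treated as profile 0 by
  // ParseSdpForVP9Profile(), so this only matches offers that explicitly
  // request profile 2.
  return webrtc::VP9IsSameProfile(local_params, offered_format.parameters);
}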
diff --git a/third_party/libwebrtc/api/video_track_source_constraints.h b/third_party/libwebrtc/api/video_track_source_constraints.h
new file mode 100644
index 0000000000..55e5396d62
--- /dev/null
+++ b/third_party/libwebrtc/api/video_track_source_constraints.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file defines VideoTrackSourceConstraints, which describes frame-rate
+// constraints that can be applied to a video track source via
+// VideoTrackSourceInterface::ProcessConstraints.
+
+#ifndef API_VIDEO_TRACK_SOURCE_CONSTRAINTS_H_
+#define API_VIDEO_TRACK_SOURCE_CONSTRAINTS_H_
+
+#include "absl/types/optional.h"
+
+namespace webrtc {
+
+// This struct definition describes constraints on the video source that may be
+// set with VideoTrackSourceInterface::ProcessConstraints.
+struct VideoTrackSourceConstraints {
+ absl::optional<double> min_fps;
+ absl::optional<double> max_fps;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_TRACK_SOURCE_CONSTRAINTS_H_
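
A minimal sketch of applying these constraints to a source, using the VideoTrackSourceInterface::ProcessConstraints() entry point mentioned above; the 5-15 fps range is only an example.

#include "api/media_stream_interface.h"
#include "api/video_track_source_constraints.h"

// Ask the source to stay between 5 and 15 fps. Leaving a field as
// absl::nullopt keeps that bound unconstrained.
void LimitFrameRate(webrtc::VideoTrackSourceInterface* source) {
  webrtc::VideoTrackSourceConstraints constraints;
  constraints.min_fps = 5.0;
  constraints.max_fps = 15.0;
  source->ProcessConstraints(constraints);
}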
diff --git a/third_party/libwebrtc/api/video_track_source_constraints_gn/moz.build b/third_party/libwebrtc/api/video_track_source_constraints_gn/moz.build
new file mode 100644
index 0000000000..77f7ba4d70
--- /dev/null
+++ b/third_party/libwebrtc/api/video_track_source_constraints_gn/moz.build
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+
+COMPILE_FLAGS["OS_INCLUDES"] = []
+AllowCompilerWarnings()
+
+DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1"
+DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True
+DEFINES["RTC_ENABLE_VP9"] = True
+DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0"
+DEFINES["WEBRTC_LIBRARY_IMPL"] = True
+DEFINES["WEBRTC_MOZILLA_BUILD"] = True
+DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0"
+DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0"
+
+FINAL_LIBRARY = "webrtc"
+
+
+LOCAL_INCLUDES += [
+ "!/ipc/ipdl/_ipdlheaders",
+ "!/third_party/libwebrtc/gen",
+ "/ipc/chromium/src",
+ "/third_party/libwebrtc/",
+ "/third_party/libwebrtc/third_party/abseil-cpp/",
+ "/tools/profiler/public"
+]
+
+if not CONFIG["MOZ_DEBUG"]:
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
+ DEFINES["NDEBUG"] = True
+ DEFINES["NVALGRIND"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1":
+
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+
+if CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["ANDROID"] = True
+ DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1"
+ DEFINES["HAVE_SYS_UIO_H"] = True
+ DEFINES["WEBRTC_ANDROID"] = True
+ DEFINES["WEBRTC_ANDROID_OPENSLES"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_GNU_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["WEBRTC_MAC"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True
+ DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0"
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_NSS_CERTS"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_UDEV"] = True
+ DEFINES["WEBRTC_LINUX"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["USE_GLIB"] = "1"
+ DEFINES["USE_OZONE"] = "1"
+ DEFINES["USE_X11"] = "1"
+ DEFINES["WEBRTC_BSD"] = True
+ DEFINES["WEBRTC_POSIX"] = True
+ DEFINES["_FILE_OFFSET_BITS"] = "64"
+ DEFINES["_LARGEFILE64_SOURCE"] = True
+ DEFINES["_LARGEFILE_SOURCE"] = True
+ DEFINES["__STDC_CONSTANT_MACROS"] = True
+ DEFINES["__STDC_FORMAT_MACROS"] = True
+
+if CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True
+ DEFINES["NOMINMAX"] = True
+ DEFINES["NTDDI_VERSION"] = "0x0A000000"
+ DEFINES["PSAPI_VERSION"] = "2"
+ DEFINES["UNICODE"] = True
+ DEFINES["USE_AURA"] = "1"
+ DEFINES["WEBRTC_WIN"] = True
+ DEFINES["WIN32"] = True
+ DEFINES["WIN32_LEAN_AND_MEAN"] = True
+ DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP"
+ DEFINES["WINVER"] = "0x0A00"
+ DEFINES["_ATL_NO_OPENGL"] = True
+ DEFINES["_CRT_RAND_S"] = True
+ DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True
+ DEFINES["_HAS_EXCEPTIONS"] = "0"
+ DEFINES["_HAS_NODISCARD"] = True
+ DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True
+ DEFINES["_SECURE_ATL"] = True
+ DEFINES["_UNICODE"] = True
+ DEFINES["_WIN32_WINNT"] = "0x0A00"
+ DEFINES["_WINDOWS"] = True
+ DEFINES["__STD_C"] = True
+
+if CONFIG["CPU_ARCH"] == "aarch64":
+
+ DEFINES["WEBRTC_ARCH_ARM64"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "arm":
+
+ DEFINES["WEBRTC_ARCH_ARM"] = True
+ DEFINES["WEBRTC_ARCH_ARM_V7"] = True
+ DEFINES["WEBRTC_HAS_NEON"] = True
+
+if CONFIG["CPU_ARCH"] == "mips32":
+
+ DEFINES["MIPS32_LE"] = True
+ DEFINES["MIPS_FPU_LE"] = True
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "mips64":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64":
+
+ DEFINES["WEBRTC_ENABLE_AVX2"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD":
+
+ DEFINES["_DEBUG"] = True
+
+if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT":
+
+ DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0"
+
+if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["USE_X11"] = "1"
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support",
+ "unwind"
+ ]
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android":
+
+ OS_LIBS += [
+ "android_support"
+ ]
+
+if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux":
+
+ DEFINES["_GNU_SOURCE"] = True
+
+Library("video_track_source_constraints_gn")
diff --git a/third_party/libwebrtc/api/video_track_source_proxy_factory.h b/third_party/libwebrtc/api/video_track_source_proxy_factory.h
new file mode 100644
index 0000000000..eb6e96429a
--- /dev/null
+++ b/third_party/libwebrtc/api/video_track_source_proxy_factory.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
+#define API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
+
+#include "api/media_stream_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+
+// Creates a proxy source for `source` which makes sure the real
+// VideoTrackSourceInterface implementation is destroyed on the signaling thread
+// and marshals calls to `worker_thread` and `signaling_thread`.
+rtc::scoped_refptr<VideoTrackSourceInterface> RTC_EXPORT
+CreateVideoTrackSourceProxy(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ VideoTrackSourceInterface* source);
+
+} // namespace webrtc
+
+#endif // API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_
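
Typical usage is a thin wrapper like the sketch below; the thread pointers and capturer_source are application-provided, and WrapCapturerSource() is a hypothetical helper name.

#include "api/media_stream_interface.h"
#include "api/scoped_refptr.h"
#include "api/video_track_source_proxy_factory.h"
#include "rtc_base/thread.h"

// Wraps an application-owned source so that calls are marshalled to the
// correct thread and the source is destroyed on the signaling thread.
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> WrapCapturerSource(
    rtc::Thread* signaling_thread,
    rtc::Thread* worker_thread,
    webrtc::VideoTrackSourceInterface* capturer_source) {
  return webrtc::CreateVideoTrackSourceProxy(signaling_thread, worker_thread,
                                             capturer_source);
}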