Diffstat (limited to 'third_party/libwebrtc/sdk/android/src/jni')
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/DEPS  15
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/OWNERS  4
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc  50
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc  53
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc  686
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h  198
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc  167
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h  98
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS  4
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS  1
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc  247
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h  154
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc  234
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h  134
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc  501
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h  129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc  650
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h  102
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc  267
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h  140
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc  271
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h  129
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc  144
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h  92
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc  446
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h  199
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc  445
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h  193
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc  23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc  117
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/encoded_image.h  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc  63
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_common.cc  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h  168
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jvm.cc  133
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/jvm.h  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc  42
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc  80
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc  72
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h  12
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc  23
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc  26
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc  73
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc  155
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc  55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc  259
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h  89
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc  59
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc  24
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc  152
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h  54
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc  67
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h  60
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc  917
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h  141
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc  550
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h  33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc  59
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h  33
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc  161
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc  211
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h  35
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc  127
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc  114
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h  29
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc  176
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h  46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc  81
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h  69
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc  48
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc  44
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc  74
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc  35
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/video.cc  55
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/pc/video.h  45
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h  49
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc  46
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc  37
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc  25
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h  27
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h  41
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc  273
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h  117
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc  130
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h  51
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc  39
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc  490
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h  133
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_frame.cc  319
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_frame.h  43
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_sink.cc  32
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_sink.h  36
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/video_track.cc  49
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc  30
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc  38
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc  40
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h  31
-rw-r--r--  third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc  158
133 files changed, 14320 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/sdk/android/src/jni/DEPS b/third_party/libwebrtc/sdk/android/src/jni/DEPS
new file mode 100644
index 0000000000..ae33fa6830
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/DEPS
@@ -0,0 +1,15 @@
+include_rules = [
+ "+third_party/libyuv",
+ "+call/callfactoryinterface.h",
+ "+common_video",
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media/base",
+ "+media/engine",
+ "+modules/audio_device/include/audio_device.h",
+ "+modules/audio_processing/include/audio_processing.h",
+ "+modules/include",
+ "+modules/utility/include/jvm_android.h",
+ "+modules/video_coding",
+ "+pc",
+ "+system_wrappers/include",
+]
diff --git a/third_party/libwebrtc/sdk/android/src/jni/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/OWNERS
new file mode 100644
index 0000000000..557373424b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/OWNERS
@@ -0,0 +1,4 @@
+per-file androidhistogram.cc=xalep@webrtc.org
+per-file androidmetrics.cc=xalep@webrtc.org
+per-file androidvideotracksource.*=xalep@webrtc.org
+per-file androidvideotracksource.cc=xalep@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc
new file mode 100644
index 0000000000..498f143743
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_histogram.cc
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+
+#include "sdk/android/generated_base_jni/Histogram_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+// Enables the collection and creation of native histograms.
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_Histogram_CreateCounts(JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name,
+ jint min,
+ jint max,
+ jint buckets) {
+ std::string name = JavaToStdString(jni, j_name);
+ return jlongFromPointer(
+ metrics::HistogramFactoryGetCounts(name, min, max, buckets));
+}
+
+static jlong JNI_Histogram_CreateEnumeration(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name,
+ jint max) {
+ std::string name = JavaToStdString(jni, j_name);
+ return jlongFromPointer(metrics::HistogramFactoryGetEnumeration(name, max));
+}
+
+static void JNI_Histogram_AddSample(JNIEnv* jni,
+ jlong histogram,
+ jint sample) {
+ if (histogram) {
+ HistogramAdd(reinterpret_cast<metrics::Histogram*>(histogram), sample);
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
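
For reference, a minimal native-side sketch of the factory API these bindings forward to; the histogram name and ranges below are illustrative, not values used by this file:

    // Sketch assuming only system_wrappers/include/metrics.h.
    #include "system_wrappers/include/metrics.h"

    void RecordExampleSampleMs(int sample_ms) {
      // Counts histogram: range [1, 1000] across 50 buckets, mirroring the
      // (name, min, max, buckets) arguments of JNI_Histogram_CreateCounts.
      static webrtc::metrics::Histogram* histogram =
          webrtc::metrics::HistogramFactoryGetCounts(
              "WebRTC.Example.Android.SampleMs", 1, 1000, 50);
      if (histogram)
        webrtc::metrics::HistogramAdd(histogram, sample_ms);
    }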
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc
new file mode 100644
index 0000000000..01398cc77f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_metrics.cc
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+
+#include "rtc_base/string_utils.h"
+#include "sdk/android/generated_metrics_jni/Metrics_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+// Enables the collection and creation of native histograms.
+namespace webrtc {
+namespace jni {
+
+static void JNI_Metrics_Enable(JNIEnv* jni) {
+ metrics::Enable();
+}
+
+// Gets and clears native histograms.
+static ScopedJavaLocalRef<jobject> JNI_Metrics_GetAndReset(JNIEnv* jni) {
+ ScopedJavaLocalRef<jobject> j_metrics = Java_Metrics_Constructor(jni);
+
+ std::map<std::string, std::unique_ptr<metrics::SampleInfo>,
+ rtc::AbslStringViewCmp>
+ histograms;
+ metrics::GetAndReset(&histograms);
+ for (const auto& kv : histograms) {
+ // Create and add samples to `HistogramInfo`.
+ ScopedJavaLocalRef<jobject> j_info = Java_HistogramInfo_Constructor(
+ jni, kv.second->min, kv.second->max,
+ static_cast<int>(kv.second->bucket_count));
+ for (const auto& sample : kv.second->samples) {
+ Java_HistogramInfo_addSample(jni, j_info, sample.first, sample.second);
+ }
+ // Add `HistogramInfo` to `Metrics`.
+ ScopedJavaLocalRef<jstring> j_name = NativeToJavaString(jni, kv.first);
+ Java_Metrics_add(jni, j_metrics, j_name, j_info);
+ }
+ CHECK_EXCEPTION(jni);
+ return j_metrics;
+}
+
+} // namespace jni
+} // namespace webrtc
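
A minimal native-side sketch of the enable/drain cycle that JNI_Metrics_Enable and JNI_Metrics_GetAndReset expose to Java (sample recording elided):

    // Sketch under the same includes as the file above.
    webrtc::metrics::Enable();
    // ... samples are recorded elsewhere via the histogram factory API ...
    std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>,
             rtc::AbslStringViewCmp>
        histograms;
    webrtc::metrics::GetAndReset(&histograms);
    for (const auto& kv : histograms) {
      // kv.first is the histogram name; kv.second->samples maps each recorded
      // value to its count, which is what gets mirrored into HistogramInfo.
    }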
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc
new file mode 100644
index 0000000000..539d41487e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.cc
@@ -0,0 +1,686 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_network_monitor.h"
+
+#include <dlfcn.h>
+
+#include "absl/strings/string_view.h"
+#ifndef RTLD_NOLOAD
+// This was added to dlfcn.h in Lollipop.
+#define RTLD_NOLOAD 4
+#endif
+
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "sdk/android/generated_base_jni/NetworkChangeDetector_jni.h"
+#include "sdk/android/generated_base_jni/NetworkMonitor_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+const char* NetworkTypeToString(NetworkType type) {
+ switch (type) {
+ case NETWORK_UNKNOWN:
+ return "UNKNOWN";
+ case NETWORK_ETHERNET:
+ return "ETHERNET";
+ case NETWORK_WIFI:
+ return "WIFI";
+ case NETWORK_5G:
+ return "5G";
+ case NETWORK_4G:
+ return "4G";
+ case NETWORK_3G:
+ return "3G";
+ case NETWORK_2G:
+ return "2G";
+ case NETWORK_UNKNOWN_CELLULAR:
+ return "UNKNOWN_CELLULAR";
+ case NETWORK_BLUETOOTH:
+ return "BLUETOOTH";
+ case NETWORK_VPN:
+ return "VPN";
+ case NETWORK_NONE:
+ return "NONE";
+ }
+}
+
+} // namespace
+
+enum AndroidSdkVersion {
+ SDK_VERSION_LOLLIPOP = 21,
+ SDK_VERSION_MARSHMALLOW = 23
+};
+
+static NetworkType GetNetworkTypeFromJava(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_network_type);
+ if (enum_name == "CONNECTION_UNKNOWN") {
+ return NetworkType::NETWORK_UNKNOWN;
+ }
+ if (enum_name == "CONNECTION_ETHERNET") {
+ return NetworkType::NETWORK_ETHERNET;
+ }
+ if (enum_name == "CONNECTION_WIFI") {
+ return NetworkType::NETWORK_WIFI;
+ }
+ if (enum_name == "CONNECTION_5G") {
+ return NetworkType::NETWORK_5G;
+ }
+ if (enum_name == "CONNECTION_4G") {
+ return NetworkType::NETWORK_4G;
+ }
+ if (enum_name == "CONNECTION_3G") {
+ return NetworkType::NETWORK_3G;
+ }
+ if (enum_name == "CONNECTION_2G") {
+ return NetworkType::NETWORK_2G;
+ }
+ if (enum_name == "CONNECTION_UNKNOWN_CELLULAR") {
+ return NetworkType::NETWORK_UNKNOWN_CELLULAR;
+ }
+ if (enum_name == "CONNECTION_BLUETOOTH") {
+ return NetworkType::NETWORK_BLUETOOTH;
+ }
+ if (enum_name == "CONNECTION_VPN") {
+ return NetworkType::NETWORK_VPN;
+ }
+ if (enum_name == "CONNECTION_NONE") {
+ return NetworkType::NETWORK_NONE;
+ }
+ RTC_DCHECK_NOTREACHED();
+ return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(
+ NetworkType network_type,
+ bool surface_cellular_types) {
+ switch (network_type) {
+ case NETWORK_UNKNOWN:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_ETHERNET:
+ return rtc::ADAPTER_TYPE_ETHERNET;
+ case NETWORK_WIFI:
+ return rtc::ADAPTER_TYPE_WIFI;
+ case NETWORK_5G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_5G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_4G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_4G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_3G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_3G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_2G:
+ return surface_cellular_types ? rtc::ADAPTER_TYPE_CELLULAR_2G
+ : rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_UNKNOWN_CELLULAR:
+ return rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_VPN:
+ return rtc::ADAPTER_TYPE_VPN;
+ case NETWORK_BLUETOOTH:
+ // There is no corresponding mapping for bluetooth networks.
+ // Map it to UNKNOWN for now.
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_NONE:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+
+ RTC_DCHECK_NOTREACHED() << "Invalid network type " << network_type;
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+}
+
+static rtc::IPAddress JavaToNativeIpAddress(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ip_address) {
+ std::vector<int8_t> address =
+ JavaToNativeByteArray(jni, Java_IPAddress_getAddress(jni, j_ip_address));
+ size_t address_length = address.size();
+ if (address_length == 4) {
+ // IP4
+ struct in_addr ip4_addr;
+ memcpy(&ip4_addr.s_addr, address.data(), 4);
+ return rtc::IPAddress(ip4_addr);
+ }
+ // IP6
+ RTC_CHECK(address_length == 16);
+ struct in6_addr ip6_addr;
+ memcpy(ip6_addr.s6_addr, address.data(), address_length);
+ return rtc::IPAddress(ip6_addr);
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_info) {
+ NetworkInformation network_info;
+ network_info.interface_name = JavaToStdString(
+ jni, Java_NetworkInformation_getName(jni, j_network_info));
+ network_info.handle = static_cast<NetworkHandle>(
+ Java_NetworkInformation_getHandle(jni, j_network_info));
+ network_info.type = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getConnectionType(jni, j_network_info));
+ network_info.underlying_type_for_vpn = GetNetworkTypeFromJava(
+ jni, Java_NetworkInformation_getUnderlyingConnectionTypeForVpn(
+ jni, j_network_info));
+ ScopedJavaLocalRef<jobjectArray> j_ip_addresses =
+ Java_NetworkInformation_getIpAddresses(jni, j_network_info);
+ network_info.ip_addresses = JavaToNativeVector<rtc::IPAddress>(
+ jni, j_ip_addresses, &JavaToNativeIpAddress);
+ return network_info;
+}
+
+static bool AddressMatch(const rtc::IPAddress& ip1, const rtc::IPAddress& ip2) {
+ if (ip1.family() != ip2.family()) {
+ return false;
+ }
+ if (ip1.family() == AF_INET) {
+ return ip1.ipv4_address().s_addr == ip2.ipv4_address().s_addr;
+ }
+ if (ip1.family() == AF_INET6) {
+    // The last 64 bits of an IPv6 address form the temporary part and can
+    // change over time, so we only compare the first 64 bits.
+ return memcmp(ip1.ipv6_address().s6_addr, ip2.ipv6_address().s6_addr,
+ sizeof(in6_addr) / 2) == 0;
+ }
+ return false;
+}
+
+NetworkInformation::NetworkInformation() = default;
+
+NetworkInformation::NetworkInformation(const NetworkInformation&) = default;
+
+NetworkInformation::NetworkInformation(NetworkInformation&&) = default;
+
+NetworkInformation::~NetworkInformation() = default;
+
+NetworkInformation& NetworkInformation::operator=(const NetworkInformation&) =
+ default;
+
+NetworkInformation& NetworkInformation::operator=(NetworkInformation&&) =
+ default;
+
+std::string NetworkInformation::ToString() const {
+ rtc::StringBuilder ss;
+ ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+ << type;
+ if (type == NETWORK_VPN) {
+ ss << "; underlying_type_for_vpn " << underlying_type_for_vpn;
+ }
+ ss << "]";
+ return ss.Release();
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_application_context,
+ const FieldTrialsView& field_trials)
+ : android_sdk_int_(Java_NetworkMonitor_androidSdkInt(env)),
+ j_application_context_(env, j_application_context),
+ j_network_monitor_(env, Java_NetworkMonitor_getInstance(env)),
+ network_thread_(rtc::Thread::Current()),
+ field_trials_(field_trials) {}
+
+AndroidNetworkMonitor::~AndroidNetworkMonitor() {
+ RTC_DCHECK(!started_);
+}
+
+void AndroidNetworkMonitor::Start() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (started_) {
+ return;
+ }
+ reset();
+ started_ = true;
+ surface_cellular_types_ =
+ field_trials_.IsEnabled("WebRTC-SurfaceCellularTypes");
+ find_network_handle_without_ipv6_temporary_part_ = field_trials_.IsEnabled(
+ "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart");
+ bind_using_ifname_ =
+ !field_trials_.IsDisabled("WebRTC-BindUsingInterfaceName");
+ disable_is_adapter_available_ = field_trials_.IsDisabled(
+ "WebRTC-AndroidNetworkMonitor-IsAdapterAvailable");
+
+ // This pointer is also accessed by the methods called from java threads.
+ // Assigning it here is safe, because the java monitor is in a stopped state,
+ // and will not make any callbacks.
+ safety_flag_ = PendingTaskSafetyFlag::Create();
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_NetworkMonitor_startMonitoring(
+ env, j_network_monitor_, j_application_context_, jlongFromPointer(this),
+ NativeToJavaString(
+ env, field_trials_.Lookup("WebRTC-NetworkMonitorAutoDetect")));
+}
+
+void AndroidNetworkMonitor::reset() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ network_handle_by_address_.clear();
+ network_handle_by_if_name_.clear();
+ network_info_by_handle_.clear();
+ network_preference_by_adapter_type_.clear();
+}
+
+void AndroidNetworkMonitor::Stop() {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ if (!started_) {
+ return;
+ }
+ started_ = false;
+ find_network_handle_without_ipv6_temporary_part_ = false;
+
+ // Cancel any pending tasks. We should not call
+ // `InvokeNetworksChangedCallback()` when the monitor is stopped.
+ safety_flag_->SetNotAlive();
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_NetworkMonitor_stopMonitoring(env, j_network_monitor_,
+ jlongFromPointer(this));
+
+ reset();
+}
+
+// The implementation is largely taken from UDPSocketPosix::BindToNetwork in
+// https://cs.chromium.org/chromium/src/net/udp/udp_socket_posix.cc
+rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork(
+ int socket_fd,
+ const rtc::IPAddress& address,
+ absl::string_view if_name) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // Android prior to Lollipop didn't have support for binding sockets to
+ // networks. This may also occur if there is no connectivity manager
+ // service.
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ const bool network_binding_supported =
+ Java_NetworkMonitor_networkBindingSupported(env, j_network_monitor_);
+ if (!network_binding_supported) {
+ RTC_LOG(LS_WARNING)
+ << "BindSocketToNetwork is not supported on this platform "
+ "(Android SDK: "
+ << android_sdk_int_ << ")";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+
+ absl::optional<NetworkHandle> network_handle =
+ FindNetworkHandleFromAddressOrName(address, if_name);
+ if (!network_handle) {
+ RTC_LOG(LS_WARNING)
+ << "BindSocketToNetwork unable to find network handle for"
+ << " addr: " << address.ToSensitiveString() << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::ADDRESS_NOT_FOUND;
+ }
+
+ if (*network_handle == 0 /* NETWORK_UNSPECIFIED */) {
+ RTC_LOG(LS_WARNING) << "BindSocketToNetwork 0 network handle for"
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+
+ int rv = 0;
+ if (android_sdk_int_ >= SDK_VERSION_MARSHMALLOW) {
+ // See declaration of android_setsocknetwork() here:
+ // http://androidxref.com/6.0.0_r1/xref/development/ndk/platforms/android-M/include/android/multinetwork.h#65
+ // Function cannot be called directly as it will cause app to fail to load
+ // on pre-marshmallow devices.
+ typedef int (*MarshmallowSetNetworkForSocket)(NetworkHandle net,
+ int socket);
+ static MarshmallowSetNetworkForSocket marshmallowSetNetworkForSocket;
+ // This is not thread-safe, but we are running this only on the worker
+ // thread.
+ if (!marshmallowSetNetworkForSocket) {
+ const std::string android_native_lib_path = "libandroid.so";
+ void* lib = dlopen(android_native_lib_path.c_str(), RTLD_NOW);
+ if (lib == nullptr) {
+ RTC_LOG(LS_ERROR) << "Library " << android_native_lib_path
+ << " not found!";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ marshmallowSetNetworkForSocket =
+ reinterpret_cast<MarshmallowSetNetworkForSocket>(
+ dlsym(lib, "android_setsocknetwork"));
+ }
+ if (!marshmallowSetNetworkForSocket) {
+ RTC_LOG(LS_ERROR) << "Symbol marshmallowSetNetworkForSocket is not found";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ rv = marshmallowSetNetworkForSocket(*network_handle, socket_fd);
+ } else {
+ // NOTE: This relies on Android implementation details, but it won't
+ // change because Lollipop is already released.
+ typedef int (*LollipopSetNetworkForSocket)(unsigned net, int socket);
+ static LollipopSetNetworkForSocket lollipopSetNetworkForSocket;
+ // This is not threadsafe, but we are running this only on the worker
+ // thread.
+ if (!lollipopSetNetworkForSocket) {
+ // Android's netd client library should always be loaded in our address
+ // space as it shims libc functions like connect().
+ const std::string net_library_path = "libnetd_client.so";
+ // Use RTLD_NOW to match Android's prior loading of the library:
+ // http://androidxref.com/6.0.0_r5/xref/bionic/libc/bionic/NetdClient.cpp#37
+ // Use RTLD_NOLOAD to assert that the library is already loaded and
+ // avoid doing any disk IO.
+ void* lib = dlopen(net_library_path.c_str(), RTLD_NOW | RTLD_NOLOAD);
+ if (lib == nullptr) {
+ RTC_LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ lollipopSetNetworkForSocket =
+ reinterpret_cast<LollipopSetNetworkForSocket>(
+ dlsym(lib, "setNetworkForSocket"));
+ }
+ if (!lollipopSetNetworkForSocket) {
+ RTC_LOG(LS_ERROR) << "Symbol lollipopSetNetworkForSocket is not found ";
+ return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
+ }
+ rv = lollipopSetNetworkForSocket(*network_handle, socket_fd);
+ }
+
+ // If `network` has since disconnected, `rv` will be ENONET. Surface this as
+ // ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back
+ // the less descriptive ERR_FAILED.
+ if (rv == 0) {
+ RTC_LOG(LS_VERBOSE) << "BindSocketToNetwork bound network handle for"
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ return rtc::NetworkBindingResult::SUCCESS;
+ }
+
+ RTC_LOG(LS_WARNING) << "BindSocketToNetwork got error: " << rv
+ << " addr: " << address.ToSensitiveString()
+ << " ifname: " << if_name;
+ if (rv == ENONET) {
+ return rtc::NetworkBindingResult::NETWORK_CHANGED;
+ }
+
+ return rtc::NetworkBindingResult::FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_n(
+ const NetworkInformation& network_info) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+ network_info_by_handle_[network_info.handle] = network_info;
+ for (const rtc::IPAddress& address : network_info.ip_addresses) {
+ network_handle_by_address_[address] = network_info.handle;
+ }
+ network_handle_by_if_name_[network_info.interface_name] = network_info.handle;
+ RTC_CHECK(network_info_by_handle_.size() >=
+ network_handle_by_if_name_.size());
+ InvokeNetworksChangedCallback();
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromAddressOrName(
+ const rtc::IPAddress& ip_address,
+ absl::string_view if_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Find network handle.";
+ if (find_network_handle_without_ipv6_temporary_part_) {
+ for (auto const& iter : network_info_by_handle_) {
+ const std::vector<rtc::IPAddress>& addresses = iter.second.ip_addresses;
+ auto address_it = std::find_if(addresses.begin(), addresses.end(),
+ [ip_address](rtc::IPAddress address) {
+ return AddressMatch(ip_address, address);
+ });
+ if (address_it != addresses.end()) {
+ return absl::make_optional(iter.first);
+ }
+ }
+ } else {
+ auto iter = network_handle_by_address_.find(ip_address);
+ if (iter != network_handle_by_address_.end()) {
+ return absl::make_optional(iter->second);
+ }
+ }
+
+ return FindNetworkHandleFromIfname(if_name);
+}
+
+absl::optional<NetworkHandle>
+AndroidNetworkMonitor::FindNetworkHandleFromIfname(
+ absl::string_view if_name) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ auto iter = network_handle_by_if_name_.find(if_name);
+ if (iter != network_handle_by_if_name_.end()) {
+ return iter->second;
+ }
+
+ if (bind_using_ifname_) {
+ for (auto const& iter : network_handle_by_if_name_) {
+      // Use a substring match so that e.g. if_name="v4-wlan0" is matched
+      // against iter="wlan0".
+ if (if_name.find(iter.first) != absl::string_view::npos) {
+ return absl::make_optional(iter.second);
+ }
+ }
+ }
+
+ return absl::nullopt;
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_n(NetworkHandle handle) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Network disconnected for handle " << handle;
+ auto iter = network_info_by_handle_.find(handle);
+ if (iter == network_info_by_handle_.end()) {
+ return;
+ }
+
+ for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+ network_handle_by_address_.erase(address);
+ }
+
+  // We've discovered that the if_name is not always unique,
+  // i.e. several networks can be connected with the same if_name.
+ //
+ // This is handled the following way,
+ // 1) OnNetworkConnected_n overwrites any previous "owner" of an interface
+ // name ("owner" == entry in network_handle_by_if_name_).
+ // 2) OnNetworkDisconnected_n, we scan and see if there are any remaining
+ // connected network with the interface name, and set it as owner.
+ //
+ // This means that network_info_by_handle can have more entries than
+ // network_handle_by_if_name_.
+
+ // Check if we are registered as "owner" of if_name.
+ const auto& if_name = iter->second.interface_name;
+ auto iter2 = network_handle_by_if_name_.find(if_name);
+ RTC_DCHECK(iter2 != network_handle_by_if_name_.end());
+ if (iter2 != network_handle_by_if_name_.end() && iter2->second == handle) {
+ // We are owner...
+ // Check if there is someone else we can set as owner.
+ bool found = false;
+ for (const auto& info : network_info_by_handle_) {
+ if (info.first == handle) {
+ continue;
+ }
+ if (info.second.interface_name == if_name) {
+ found = true;
+ network_handle_by_if_name_[if_name] = info.first;
+ break;
+ }
+ }
+ if (!found) {
+ // No new owner...
+ network_handle_by_if_name_.erase(iter2);
+ }
+ } else {
+ // We are not owner...don't do anything.
+#if RTC_DCHECK_IS_ON
+ auto owner_handle = FindNetworkHandleFromIfname(if_name);
+ RTC_DCHECK(owner_handle && *owner_handle != handle);
+#endif
+ }
+
+ network_info_by_handle_.erase(iter);
+}
+
+void AndroidNetworkMonitor::OnNetworkPreference_n(
+ NetworkType type,
+ rtc::NetworkPreference preference) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ RTC_LOG(LS_INFO) << "Android network monitor preference for "
+ << NetworkTypeToString(type) << " changed to "
+ << rtc::NetworkPreferenceToString(preference);
+ auto adapter_type = AdapterTypeFromNetworkType(type, surface_cellular_types_);
+ network_preference_by_adapter_type_[adapter_type] = preference;
+ InvokeNetworksChangedCallback();
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+ const std::vector<NetworkInformation>& network_infos) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+
+ // We expect this method to be called once directly after startMonitoring.
+ // All the caches should be empty.
+ RTC_DCHECK(network_handle_by_if_name_.empty());
+ RTC_DCHECK(network_handle_by_address_.empty());
+ RTC_DCHECK(network_info_by_handle_.empty());
+ RTC_DCHECK(network_preference_by_adapter_type_.empty());
+
+ // ...but reset just in case.
+ reset();
+ RTC_LOG(LS_INFO) << "Android network monitor found " << network_infos.size()
+ << " networks";
+ for (const NetworkInformation& network : network_infos) {
+ OnNetworkConnected_n(network);
+ }
+}
+
+rtc::NetworkMonitorInterface::InterfaceInfo
+AndroidNetworkMonitor::GetInterfaceInfo(absl::string_view if_name) {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto handle = FindNetworkHandleFromIfname(if_name);
+ if (!handle) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = (disable_is_adapter_available_ ? true : false),
+ };
+ }
+ auto iter = network_info_by_handle_.find(*handle);
+ RTC_DCHECK(iter != network_info_by_handle_.end());
+ if (iter == network_info_by_handle_.end()) {
+ return {
+ .adapter_type = rtc::ADAPTER_TYPE_UNKNOWN,
+ .available = (disable_is_adapter_available_ ? true : false),
+ };
+ }
+
+ auto type =
+ AdapterTypeFromNetworkType(iter->second.type, surface_cellular_types_);
+ auto vpn_type =
+ (type == rtc::ADAPTER_TYPE_VPN)
+ ? AdapterTypeFromNetworkType(iter->second.underlying_type_for_vpn,
+ surface_cellular_types_)
+ : rtc::ADAPTER_TYPE_UNKNOWN;
+ return {
+ .adapter_type = type,
+ .underlying_type_for_vpn = vpn_type,
+ .network_preference = GetNetworkPreference(type),
+ .available = true,
+ };
+}
+
+rtc::NetworkPreference AndroidNetworkMonitor::GetNetworkPreference(
+ rtc::AdapterType adapter_type) const {
+ RTC_DCHECK_RUN_ON(network_thread_);
+ auto preference_iter = network_preference_by_adapter_type_.find(adapter_type);
+ if (preference_iter == network_preference_by_adapter_type_.end()) {
+ return rtc::NetworkPreference::NEUTRAL;
+ }
+
+ return preference_iter->second;
+}
+
+AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory()
+ : j_application_context_(nullptr) {}
+
+AndroidNetworkMonitorFactory::AndroidNetworkMonitorFactory(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_application_context)
+ : j_application_context_(env, j_application_context) {}
+
+AndroidNetworkMonitorFactory::~AndroidNetworkMonitorFactory() = default;
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) {
+ return new AndroidNetworkMonitor(AttachCurrentThreadIfNeeded(),
+ j_application_context_, field_trials);
+}
+
+void AndroidNetworkMonitor::NotifyConnectionTypeChanged(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller) {
+ network_thread_->PostTask(SafeTask(safety_flag_, [this] {
+ RTC_LOG(LS_INFO)
+ << "Android network monitor detected connection type change.";
+ InvokeNetworksChangedCallback();
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfActiveNetworkList(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobjectArray>& j_network_infos) {
+ std::vector<NetworkInformation> network_infos =
+ JavaToNativeVector<NetworkInformation>(env, j_network_infos,
+ &GetNetworkInformationFromJava);
+ SetNetworkInfos(network_infos);
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkConnect(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_network_info) {
+ NetworkInformation network_info =
+ GetNetworkInformationFromJava(env, j_network_info);
+ network_thread_->PostTask(
+ SafeTask(safety_flag_, [this, network_info = std::move(network_info)] {
+ OnNetworkConnected_n(network_info);
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkDisconnect(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ jlong network_handle) {
+ network_thread_->PostTask(SafeTask(safety_flag_, [this, network_handle] {
+ OnNetworkDisconnected_n(static_cast<NetworkHandle>(network_handle));
+ }));
+}
+
+void AndroidNetworkMonitor::NotifyOfNetworkPreference(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_connection_type,
+ jint jpreference) {
+ NetworkType type = GetNetworkTypeFromJava(env, j_connection_type);
+ rtc::NetworkPreference preference =
+ static_cast<rtc::NetworkPreference>(jpreference);
+
+ network_thread_->PostTask(SafeTask(safety_flag_, [this, type, preference] {
+ OnNetworkPreference_n(type, preference);
+ }));
+}
+
+} // namespace jni
+} // namespace webrtc
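
The Notify* methods above all funnel Java-thread callbacks onto the network thread through the same cancellation pattern; a standalone sketch of that pattern, with `network_thread` standing in for a live rtc::Thread*:

    // Sketch assuming api/task_queue/pending_task_safety_flag.h.
    rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety =
        webrtc::PendingTaskSafetyFlag::Create();
    network_thread->PostTask(webrtc::SafeTask(safety, [] {
      // Runs only while `safety` is still alive.
    }));
    // As in Stop(): pending tasks become no-ops instead of touching a
    // stopped monitor.
    safety->SetNotAlive();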
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h
new file mode 100644
index 0000000000..d0aad5ea76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_network_monitor.h
@@ -0,0 +1,198 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
+#define SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "absl/types/optional.h"
+#include "api/field_trials_view.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "rtc_base/network_monitor.h"
+#include "rtc_base/network_monitor_factory.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+class AndroidNetworkMonitorTest;
+} // namespace test
+
+namespace jni {
+
+typedef int64_t NetworkHandle;
+
+// C++ equivalent of the Java NetworkChangeDetector.ConnectionType.
+enum NetworkType {
+ NETWORK_UNKNOWN,
+ NETWORK_ETHERNET,
+ NETWORK_WIFI,
+ NETWORK_5G,
+ NETWORK_4G,
+ NETWORK_3G,
+ NETWORK_2G,
+ NETWORK_UNKNOWN_CELLULAR,
+ NETWORK_BLUETOOTH,
+ NETWORK_VPN,
+ NETWORK_NONE
+};
+
+// The information is collected from Android OS so that the native code can get
+// the network type and handle (Android network ID) for each interface.
+struct NetworkInformation {
+ std::string interface_name;
+ NetworkHandle handle;
+ NetworkType type;
+ NetworkType underlying_type_for_vpn;
+ std::vector<rtc::IPAddress> ip_addresses;
+
+ NetworkInformation();
+ NetworkInformation(const NetworkInformation&);
+ NetworkInformation(NetworkInformation&&);
+ ~NetworkInformation();
+ NetworkInformation& operator=(const NetworkInformation&);
+ NetworkInformation& operator=(NetworkInformation&&);
+
+ std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorInterface {
+ public:
+ AndroidNetworkMonitor(JNIEnv* env,
+ const JavaRef<jobject>& j_application_context,
+ const FieldTrialsView& field_trials);
+ ~AndroidNetworkMonitor() override;
+
+  // TODO(sakal): Remove once downstream dependencies have been updated.
+ static void SetAndroidContext(JNIEnv* jni, jobject context) {}
+
+ void Start() override;
+ void Stop() override;
+
+ // Does `this` NetworkMonitorInterface implement BindSocketToNetwork?
+ // Only Android returns true.
+ virtual bool SupportsBindSocketToNetwork() const override { return true; }
+
+ rtc::NetworkBindingResult BindSocketToNetwork(
+ int socket_fd,
+ const rtc::IPAddress& address,
+ absl::string_view if_name) override;
+
+ InterfaceInfo GetInterfaceInfo(absl::string_view if_name) override;
+
+ // Always expected to be called on the network thread.
+ void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ void NotifyConnectionTypeChanged(JNIEnv* env,
+ const JavaRef<jobject>& j_caller);
+ void NotifyOfNetworkConnect(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_network_info);
+ void NotifyOfNetworkDisconnect(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ jlong network_handle);
+ void NotifyOfActiveNetworkList(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobjectArray>& j_network_infos);
+ void NotifyOfNetworkPreference(JNIEnv* env,
+ const JavaRef<jobject>& j_caller,
+ const JavaRef<jobject>& j_connection_type,
+ jint preference);
+
+ // Visible for testing.
+ absl::optional<NetworkHandle> FindNetworkHandleFromAddressOrName(
+ const rtc::IPAddress& address,
+ absl::string_view ifname) const;
+
+ private:
+ void reset();
+ void OnNetworkConnected_n(const NetworkInformation& network_info);
+ void OnNetworkDisconnected_n(NetworkHandle network_handle);
+ void OnNetworkPreference_n(NetworkType type,
+ rtc::NetworkPreference preference);
+
+ rtc::NetworkPreference GetNetworkPreference(rtc::AdapterType) const;
+ absl::optional<NetworkHandle> FindNetworkHandleFromIfname(
+ absl::string_view ifname) const;
+
+ const int android_sdk_int_;
+ ScopedJavaGlobalRef<jobject> j_application_context_;
+ ScopedJavaGlobalRef<jobject> j_network_monitor_;
+ rtc::Thread* const network_thread_;
+ bool started_ RTC_GUARDED_BY(network_thread_) = false;
+ std::map<std::string, NetworkHandle, rtc::AbslStringViewCmp>
+ network_handle_by_if_name_ RTC_GUARDED_BY(network_thread_);
+ std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_
+ RTC_GUARDED_BY(network_thread_);
+ std::map<NetworkHandle, NetworkInformation> network_info_by_handle_
+ RTC_GUARDED_BY(network_thread_);
+ std::map<rtc::AdapterType, rtc::NetworkPreference>
+ network_preference_by_adapter_type_ RTC_GUARDED_BY(network_thread_);
+ bool find_network_handle_without_ipv6_temporary_part_
+ RTC_GUARDED_BY(network_thread_) = false;
+ bool surface_cellular_types_ RTC_GUARDED_BY(network_thread_) = false;
+
+ // NOTE: if bind_using_ifname_ is TRUE
+ // then the adapter name is used with substring matching as follows:
+  // An adapter name reported by Android as 'wlan0'
+ // will be matched with 'v4-wlan0' ("v4-wlan0".find("wlan0") != npos).
+ // This applies to adapter_type_by_name_, vpn_underlying_adapter_type_by_name_
+ // and FindNetworkHandleFromIfname.
+ bool bind_using_ifname_ RTC_GUARDED_BY(network_thread_) = true;
+
+ // NOTE: disable_is_adapter_available_ is a kill switch for the impl.
+ // of IsAdapterAvailable().
+ bool disable_is_adapter_available_ RTC_GUARDED_BY(network_thread_) = false;
+
+ rtc::scoped_refptr<PendingTaskSafetyFlag> safety_flag_
+ RTC_PT_GUARDED_BY(network_thread_) = nullptr;
+
+ const FieldTrialsView& field_trials_;
+
+ friend class webrtc::test::AndroidNetworkMonitorTest;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  // Deprecated. Pass in the application context to this class.
+ AndroidNetworkMonitorFactory();
+
+ AndroidNetworkMonitorFactory(JNIEnv* env,
+ const JavaRef<jobject>& j_application_context);
+
+ ~AndroidNetworkMonitorFactory() override;
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor(
+ const FieldTrialsView& field_trials) override;
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_application_context_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+// TODO(magjed): Remove once external clients are updated.
+namespace webrtc_jni {
+
+using webrtc::jni::AndroidNetworkMonitor;
+using webrtc::jni::AndroidNetworkMonitorFactory;
+
+} // namespace webrtc_jni
+
+#endif // SDK_ANDROID_SRC_JNI_ANDROID_NETWORK_MONITOR_H_
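
A hedged sketch of how this factory is typically wired in; it assumes the network_monitor_factory field of webrtc::PeerConnectionFactoryDependencies, and `env`/`j_context` are placeholders for a valid JNIEnv* and Android application context:

    webrtc::PeerConnectionFactoryDependencies deps;
    deps.network_monitor_factory =
        std::make_unique<webrtc::jni::AndroidNetworkMonitorFactory>(env,
                                                                    j_context);
    // Monitors created from this factory return true from
    // SupportsBindSocketToNetwork() and implement BindSocketToNetwork().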
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc
new file mode 100644
index 0000000000..4f3152dc6f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+#include <utility>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/NativeAndroidVideoTrackSource_jni.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// MediaCodec wants resolution to be divisible by 2.
+const int kRequiredResolutionAlignment = 2;
+
+VideoRotation jintToVideoRotation(jint rotation) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ return static_cast<VideoRotation>(rotation);
+}
+
+absl::optional<std::pair<int, int>> OptionalAspectRatio(jint j_width,
+ jint j_height) {
+ if (j_width > 0 && j_height > 0)
+ return std::pair<int, int>(j_width, j_height);
+ return absl::nullopt;
+}
+
+} // namespace
+
+AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
+ JNIEnv* jni,
+ bool is_screencast,
+ bool align_timestamps)
+ : AdaptedVideoTrackSource(kRequiredResolutionAlignment),
+ signaling_thread_(signaling_thread),
+ is_screencast_(is_screencast),
+ align_timestamps_(align_timestamps) {
+ RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
+}
+AndroidVideoTrackSource::~AndroidVideoTrackSource() = default;
+
+bool AndroidVideoTrackSource::is_screencast() const {
+ return is_screencast_.load();
+}
+
+absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
+ return false;
+}
+
+void AndroidVideoTrackSource::SetState(JNIEnv* env,
+ jboolean j_is_live) {
+ const SourceState state = j_is_live ? kLive : kEnded;
+ if (state_.exchange(state) != state) {
+ if (rtc::Thread::Current() == signaling_thread_) {
+ FireOnChanged();
+ } else {
+ // TODO(sakal): Is this even necessary, does FireOnChanged have to be
+ // called from signaling thread?
+ signaling_thread_->PostTask([this] { FireOnChanged(); });
+ }
+ }
+}
+
+AndroidVideoTrackSource::SourceState AndroidVideoTrackSource::state() const {
+ return state_.load();
+}
+
+bool AndroidVideoTrackSource::remote() const {
+ return false;
+}
+
+void AndroidVideoTrackSource::SetIsScreencast(JNIEnv* env,
+ jboolean j_is_screencast) {
+ is_screencast_.store(j_is_screencast);
+}
+
+ScopedJavaLocalRef<jobject> AndroidVideoTrackSource::AdaptFrame(
+ JNIEnv* env,
+ jint j_width,
+ jint j_height,
+ jint j_rotation,
+ jlong j_timestamp_ns) {
+ const VideoRotation rotation = jintToVideoRotation(j_rotation);
+
+ const int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+ const int64_t aligned_timestamp_ns =
+ align_timestamps_ ? rtc::kNumNanosecsPerMicrosec *
+ timestamp_aligner_.TranslateTimestamp(
+ camera_time_us, rtc::TimeMicros())
+ : j_timestamp_ns;
+
+ int adapted_width = 0;
+ int adapted_height = 0;
+ int crop_width = 0;
+ int crop_height = 0;
+ int crop_x = 0;
+ int crop_y = 0;
+ bool drop;
+
+ // TODO(magjed): Move this logic to users of NativeAndroidVideoTrackSource
+ // instead, in order to keep this native wrapping layer as thin as possible.
+ if (rotation % 180 == 0) {
+ drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
+ j_width, j_height, camera_time_us, &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y);
+ } else {
+ // Swap all width/height and x/y.
+ drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
+ j_height, j_width, camera_time_us, &adapted_height, &adapted_width,
+ &crop_height, &crop_width, &crop_y, &crop_x);
+ }
+
+ return Java_NativeAndroidVideoTrackSource_createFrameAdaptationParameters(
+ env, crop_x, crop_y, crop_width, crop_height, adapted_width,
+ adapted_height, aligned_timestamp_ns, drop);
+}
+
+void AndroidVideoTrackSource::OnFrameCaptured(
+ JNIEnv* env,
+ jint j_rotation,
+ jlong j_timestamp_ns,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(env, j_video_frame_buffer);
+ const VideoRotation rotation = jintToVideoRotation(j_rotation);
+
+ // AdaptedVideoTrackSource handles applying rotation for I420 frames.
+ if (apply_rotation() && rotation != kVideoRotation_0)
+ buffer = buffer->ToI420();
+
+ OnFrame(VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_rotation(rotation)
+ .set_timestamp_us(j_timestamp_ns / rtc::kNumNanosecsPerMicrosec)
+ .build());
+}
+
+void AndroidVideoTrackSource::AdaptOutputFormat(
+ JNIEnv* env,
+ jint j_landscape_width,
+ jint j_landscape_height,
+ const JavaRef<jobject>& j_max_landscape_pixel_count,
+ jint j_portrait_width,
+ jint j_portrait_height,
+ const JavaRef<jobject>& j_max_portrait_pixel_count,
+ const JavaRef<jobject>& j_max_fps) {
+ video_adapter()->OnOutputFormatRequest(
+ OptionalAspectRatio(j_landscape_width, j_landscape_height),
+ JavaToNativeOptionalInt(env, j_max_landscape_pixel_count),
+ OptionalAspectRatio(j_portrait_width, j_portrait_height),
+ JavaToNativeOptionalInt(env, j_max_portrait_pixel_count),
+ JavaToNativeOptionalInt(env, j_max_fps));
+}
+
+} // namespace jni
+} // namespace webrtc
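
The timestamp handling in AdaptFrame() translates camera timestamps onto the rtc::TimeMicros() clock; a minimal sketch of the aligner in isolation (`capture_time_ns` is a placeholder input):

    // Sketch assuming rtc_base/timestamp_aligner.h and rtc_base/time_utils.h.
    rtc::TimestampAligner aligner;
    const int64_t camera_time_us =
        capture_time_ns / rtc::kNumNanosecsPerMicrosec;
    const int64_t aligned_time_us =
        aligner.TranslateTimestamp(camera_time_us, rtc::TimeMicros());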
diff --git a/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h
new file mode 100644
index 0000000000..625633b90b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/android_video_track_source.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+#define API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
+
+#include <jni.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "media/base/adapted_video_track_source.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/timestamp_aligner.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// This class needs to be used in conjunction with the corresponding Java
+// class NativeAndroidVideoTrackSource. This class is thread safe and its
+// methods can be called from any thread, but if frames A, B, ..., are sent
+// to adaptFrame(), the adapted frames adaptedA, adaptedB, ..., need to be
+// passed in the same order to onFrameCaptured().
+class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
+ public:
+ AndroidVideoTrackSource(rtc::Thread* signaling_thread,
+ JNIEnv* jni,
+ bool is_screencast,
+ bool align_timestamps);
+ ~AndroidVideoTrackSource() override;
+
+ bool is_screencast() const override;
+
+ // Indicates that the encoder should denoise video before encoding it.
+ // If it is not set, the default configuration is used which is different
+ // depending on video codec.
+ absl::optional<bool> needs_denoising() const override;
+
+ void SetState(SourceState state);
+
+ SourceState state() const override;
+
+ bool remote() const override;
+
+ // This function should be called before delivering any frame to determine if
+ // the frame should be dropped or what the cropping and scaling parameters
+ // should be. This function is thread safe and can be called from any thread.
+ // This function returns
+ // NativeAndroidVideoTrackSource.FrameAdaptationParameters, or null if the
+ // frame should be dropped.
+ ScopedJavaLocalRef<jobject> AdaptFrame(JNIEnv* env,
+ jint j_width,
+ jint j_height,
+ jint j_rotation,
+ jlong j_timestamp_ns);
+
+ // This function converts and passes the frame on to the rest of the C++
+ // WebRTC layer. Note that GetFrameAdaptationParameters() is expected to be
+ // called first and that the delivered frame conforms to those parameters.
+ // This function is thread safe and can be called from any thread.
+ void OnFrameCaptured(JNIEnv* env,
+ jint j_rotation,
+ jlong j_timestamp_ns,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ void SetState(JNIEnv* env,
+ jboolean j_is_live);
+
+ void AdaptOutputFormat(JNIEnv* env,
+ jint j_landscape_width,
+ jint j_landscape_height,
+ const JavaRef<jobject>& j_max_landscape_pixel_count,
+ jint j_portrait_width,
+ jint j_portrait_height,
+ const JavaRef<jobject>& j_max_portrait_pixel_count,
+ const JavaRef<jobject>& j_max_fps);
+
+ void SetIsScreencast(JNIEnv* env, jboolean j_is_screencast);
+
+ private:
+ rtc::Thread* signaling_thread_;
+ std::atomic<SourceState> state_;
+ std::atomic<bool> is_screencast_;
+ rtc::TimestampAligner timestamp_aligner_;
+ const bool align_timestamps_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
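
The two-phase contract described in the class comment, sketched as the expected call order (driven from the Java side in practice; buffer cropping elided, names are placeholders):

    // 1) Ask whether/how to adapt; a null result means drop this frame.
    webrtc::ScopedJavaLocalRef<jobject> params =
        source->AdaptFrame(env, width, height, rotation, timestamp_ns);
    if (params.is_null())
      return;  // Dropped by the adapter.
    // 2) Crop/scale the buffer according to `params`, then deliver it.
    //    Frames must arrive in the same order they were adapted.
    source->OnFrameCaptured(env, rotation, timestamp_ns, adapted_buffer);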
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS
new file mode 100644
index 0000000000..9a3adee687
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+base/android/jni_android.h",
+ "+modules/audio_device",
+]
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS
new file mode 100644
index 0000000000..95662c195c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/OWNERS
@@ -0,0 +1 @@
+henrika@webrtc.org
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc
new file mode 100644
index 0000000000..ae8fcb9613
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.cc
@@ -0,0 +1,247 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_player.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageOutputStreamDisconnected,
+};
+
+AAudioPlayer::AAudioPlayer(const AudioParameters& audio_parameters)
+ : main_thread_(rtc::Thread::Current()),
+ aaudio_(audio_parameters, AAUDIO_DIRECTION_OUTPUT, this) {
+ RTC_LOG(LS_INFO) << "ctor";
+ thread_checker_aaudio_.Detach();
+}
+
+AAudioPlayer::~AAudioPlayer() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ Terminate();
+ RTC_LOG(LS_INFO) << "#detected underruns: " << underrun_count_;
+}
+
+int AAudioPlayer::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ if (aaudio_.audio_parameters().channels() == 2) {
+ RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+ }
+ return 0;
+}
+
+int AAudioPlayer::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ StopPlayout();
+ return 0;
+}
+
+int AAudioPlayer::InitPlayout() {
+ RTC_LOG(LS_INFO) << "InitPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!playing_);
+ if (!aaudio_.Init()) {
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+}
+
+bool AAudioPlayer::PlayoutIsInitialized() const {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ return initialized_;
+}
+
+int AAudioPlayer::StartPlayout() {
+ RTC_LOG(LS_INFO) << "StartPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DCHECK(!playing_);
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+ << "Playout can not start since InitPlayout must succeed first";
+ return 0;
+ }
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ if (!aaudio_.Start()) {
+ return -1;
+ }
+ underrun_count_ = aaudio_.xrun_count();
+ first_data_callback_ = true;
+ playing_ = true;
+ return 0;
+}
+
+int AAudioPlayer::StopPlayout() {
+ RTC_LOG(LS_INFO) << "StopPlayout";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ if (!aaudio_.Stop()) {
+ RTC_LOG(LS_ERROR) << "StopPlayout failed";
+ return -1;
+ }
+ thread_checker_aaudio_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ return 0;
+}
+
+bool AAudioPlayer::Playing() const {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ return playing_;
+}
+
+void AAudioPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_DLOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ audio_device_buffer_ = audioBuffer;
+ RTC_CHECK(audio_device_buffer_);
+ const AudioParameters audio_parameters = aaudio_.audio_parameters();
+ audio_device_buffer_->SetPlayoutSampleRate(audio_parameters.sample_rate());
+ audio_device_buffer_->SetPlayoutChannels(audio_parameters.channels());
+ // Create a modified audio buffer class which allows us to ask for any number
+ // of samples (and not only multiples of 10ms) to match the optimal buffer
+ // size per callback used by AAudio.
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioPlayer::SpeakerVolumeIsAvailable() {
+ return false;
+}
+
+int AAudioPlayer::SetSpeakerVolume(uint32_t volume) {
+ return -1;
+}
+
+absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {
+ RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+ // TODO(henrika): investigate if we can use a thread checker here. Initial
+ // tests shows that this callback can sometimes be called on a unique thread
+ // but according to the documentation it should be on the same thread as the
+ // data callback.
+ // RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+ if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // The stream is disconnected and any attempt to use it will return
+ // AAUDIO_ERROR_DISCONNECTED.
+ RTC_LOG(LS_WARNING) << "Output stream disconnected";
+ // AAudio documentation states: "You should not close or reopen the stream
+ // from the callback, use another thread instead". A message is therefore
+ // sent to the main thread to do the restart operation.
+ RTC_DCHECK(main_thread_);
+ main_thread_->Post(RTC_FROM_HERE, this, kMessageOutputStreamDisconnected);
+ }
+}
+
+aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data,
+ int32_t num_frames) {
+ RTC_DCHECK_RUN_ON(&thread_checker_aaudio_);
+ // Log device id in first data callback to ensure that a valid device is
+ // utilized.
+ if (first_data_callback_) {
+ RTC_LOG(LS_INFO) << "--- First output data callback: "
+ "device id="
+ << aaudio_.device_id();
+ first_data_callback_ = false;
+ }
+
+ // Check if the underrun count has increased. If it has, increase the buffer
+ // size by adding the size of a burst. It will reduce the risk of underruns
+ // at the expense of an increased latency.
+ // TODO(henrika): enable possibility to disable and/or tune the algorithm.
+ const int32_t underrun_count = aaudio_.xrun_count();
+ if (underrun_count > underrun_count_) {
+ RTC_LOG(LS_ERROR) << "Underrun detected: " << underrun_count;
+ underrun_count_ = underrun_count;
+ aaudio_.IncreaseOutputBufferSize();
+ }
+
+ // Estimate latency between writing an audio frame to the output stream and
+ // the time that same frame is played out on the output audio device.
+ latency_millis_ = aaudio_.EstimateLatencyMillis();
+ // TODO(henrika): use for development only.
+ if (aaudio_.frames_written() % (1000 * aaudio_.frames_per_burst()) == 0) {
+ RTC_DLOG(LS_INFO) << "output latency: " << latency_millis_
+ << ", num_frames: " << num_frames;
+ }
+
+ // Read audio data from the WebRTC source using the FineAudioBuffer object
+ // and write that data into `audio_data` to be played out by AAudio.
+ // Prime output with zeros during a short initial phase to avoid distortion.
+ // TODO(henrika): do more work to figure out if the initial forced silence
+ // period is really needed.
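+ // As an example with hypothetical numbers: 50 bursts of 192 frames at a
+ // 48 kHz sample rate correspond to 50 * 192 / 48000 = 0.2 s of silence.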
+ if (aaudio_.frames_written() < 50 * aaudio_.frames_per_burst()) {
+ const size_t num_bytes =
+ sizeof(int16_t) * aaudio_.samples_per_frame() * num_frames;
+ memset(audio_data, 0, num_bytes);
+ } else {
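+ // The `+ 0.5` term rounds the latency estimate to the nearest whole
+ // millisecond before handing it to the FineAudioBuffer.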
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::MakeArrayView(static_cast<int16_t*>(audio_data),
+ aaudio_.samples_per_frame() * num_frames),
+ static_cast<int>(latency_millis_ + 0.5));
+ }
+
+ // TODO(henrika): possibly add trace here to be included in systrace.
+ // See https://developer.android.com/studio/profile/systrace-commandline.html.
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioPlayer::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ switch (msg->message_id) {
+ case kMessageOutputStreamDisconnected:
+ HandleStreamDisconnected();
+ break;
+ }
+}
+
+void AAudioPlayer::HandleStreamDisconnected() {
+ RTC_DCHECK_RUN_ON(&main_thread_checker_);
+ RTC_DLOG(LS_INFO) << "HandleStreamDisconnected";
+ if (!initialized_ || !playing_) {
+ return;
+ }
+ // Perform a restart by first closing the disconnected stream and then start
+ // a new stream; this time using the new (preferred) audio output device.
+ StopPlayout();
+ InitPlayout();
+ StartPlayout();
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
new file mode 100644
index 0000000000..9e775ecfa3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_player.h
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "rtc_base/thread_annotations.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class AudioDeviceBuffer;
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio output support for Android
+// using the C-based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will DCHECK if any method is called on an invalid thread. Audio buffers
+// are requested on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitPlayout() after StopPlayout()
+// to be able to call StartPlayout() again. This is in line with how the Java-
+// based implementation works.
+//
+// An audio stream can be disconnected, e.g. when an audio device is removed.
+// This implementation will restart the audio stream using the new preferred
+// device if such an event happens.
+//
+// Also supports automatic buffer-size adjustment based on underrun detections
+// where the internal AAudio buffer can be increased when needed. It will
+// reduce the risk of underruns (~glitches) at the expense of an increased
+// latency.
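+//
+// Usage sketch (illustrative only; in production the player is created and
+// driven by the AudioDeviceModule glue code rather than called directly):
+//   AAudioPlayer player(audio_parameters);
+//   player.Init();
+//   player.AttachAudioBuffer(adb);  // Non-null AudioDeviceBuffer.
+//   player.InitPlayout();
+//   player.StartPlayout();
+//   ...
+//   player.StopPlayout();  // InitPlayout() must be called again to restart.
+//   player.Terminate();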
+class AAudioPlayer final : public AudioOutput,
+ public AAudioObserverInterface,
+ public rtc::MessageHandler {
+ public:
+ explicit AAudioPlayer(const AudioParameters& audio_parameters);
+ ~AAudioPlayer() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int StartPlayout() override;
+ int StopPlayout() override;
+ bool Playing() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // Not implemented in AAudio.
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+
+ protected:
+ // AAudioObserverInterface implementation.
+
+ // For an output stream, this function should render and write `num_frames`
+ // of data in the streams current data format to the `audio_data` buffer.
+ // Called on a real-time thread owned by AAudio.
+ aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) override;
+ // AAudio calls this function if any error occurs on a callback thread.
+ // Called on a real-time thread owned by AAudio.
+ void OnErrorCallback(aaudio_result_t error) override;
+
+ // rtc::MessageHandler used for restart messages from the error-callback
+ // thread to the main (creating) thread.
+ void OnMessage(rtc::Message* msg) override;
+
+ private:
+ // Closes the existing stream and starts a new stream.
+ void HandleStreamDisconnected();
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker main_thread_checker_;
+
+ // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a
+ // real-time thread owned by AAudio. Detached during construction of this
+ // object.
+ SequenceChecker thread_checker_aaudio_;
+
+ // The thread on which this object is created.
+ rtc::Thread* main_thread_;
+
+ // Wraps all AAudio resources. Contains an output stream using the default
+ // output audio device. Can be accessed on both the main thread and the
+ // real-time thread owned by AAudio. See separate AAudio documentation about
+ // thread safety.
+ AAudioWrapper aaudio_;
+
+ // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
+ // in chunks of 10ms. It then allows for this data to be pulled in
+ // a finer or coarser granularity. I.e. interacting with this class instead
+ // of directly with the AudioDeviceBuffer one can ask for any number of
+ // audio data samples.
+ // Example: native buffer size can be 192 audio frames at 48kHz sample rate.
+ // WebRTC will provide 480 audio frames per 10ms but AAudio asks for 192
+ // in each callback (once every 4th ms). This class can then ask for 192 and
+ // the FineAudioBuffer will ask WebRTC for new data approximately only every
+ // second callback and also cache non-utilized audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Counts number of detected underrun events reported by AAudio.
+ int32_t underrun_count_ = 0;
+
+ // True only for the first data callback in each audio session.
+ bool first_data_callback_ = true;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_ RTC_GUARDED_BY(main_thread_checker_) =
+ nullptr;
+
+ bool initialized_ RTC_GUARDED_BY(main_thread_checker_) = false;
+ bool playing_ RTC_GUARDED_BY(main_thread_checker_) = false;
+
+ // Estimated latency between writing an audio frame to the output stream and
+ // the time that same frame is played out on the output audio device.
+ double latency_millis_ RTC_GUARDED_BY(thread_checker_aaudio_) = 0;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_PLAYER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
new file mode 100644
index 0000000000..d66c1d0235
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.cc
@@ -0,0 +1,234 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+
+namespace jni {
+
+enum AudioDeviceMessageType : uint32_t {
+ kMessageInputStreamDisconnected,
+};
+
+AAudioRecorder::AAudioRecorder(const AudioParameters& audio_parameters)
+ : main_thread_(rtc::Thread::Current()),
+ aaudio_(audio_parameters, AAUDIO_DIRECTION_INPUT, this) {
+ RTC_LOG(LS_INFO) << "ctor";
+ thread_checker_aaudio_.Detach();
+}
+
+AAudioRecorder::~AAudioRecorder() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ RTC_LOG(LS_INFO) << "detected owerflows: " << overflow_count_;
+}
+
+int AAudioRecorder::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (aaudio_.audio_parameters().channels() == 2) {
+ RTC_DLOG(LS_WARNING) << "Stereo mode is enabled";
+ }
+ return 0;
+}
+
+int AAudioRecorder::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ return 0;
+}
+
+int AAudioRecorder::InitRecording() {
+ RTC_LOG(LS_INFO) << "InitRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!recording_);
+ if (!aaudio_.Init()) {
+ return -1;
+ }
+ initialized_ = true;
+ return 0;
+}
+
+bool AAudioRecorder::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int AAudioRecorder::StartRecording() {
+ RTC_LOG(LS_INFO) << "StartRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!recording_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetRecord();
+ }
+ if (!aaudio_.Start()) {
+ return -1;
+ }
+ overflow_count_ = aaudio_.xrun_count();
+ first_data_callback_ = true;
+ recording_ = true;
+ return 0;
+}
+
+int AAudioRecorder::StopRecording() {
+ RTC_LOG(LS_INFO) << "StopRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+ if (!aaudio_.Stop()) {
+ return -1;
+ }
+ thread_checker_aaudio_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ return 0;
+}
+
+bool AAudioRecorder::Recording() const {
+ return recording_;
+}
+
+void AAudioRecorder::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ RTC_CHECK(audio_device_buffer_);
+ const AudioParameters audio_parameters = aaudio_.audio_parameters();
+ audio_device_buffer_->SetRecordingSampleRate(audio_parameters.sample_rate());
+ audio_device_buffer_->SetRecordingChannels(audio_parameters.channels());
+ // Create a modified audio buffer class which allows us to deliver any number
+ // of samples (and not only multiples of 10ms which WebRTC uses) to match the
+ // native AAudio buffer size.
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+}
+
+bool AAudioRecorder::IsAcousticEchoCancelerSupported() const {
+ return false;
+}
+
+bool AAudioRecorder::IsNoiseSuppressorSupported() const {
+ return false;
+}
+
+int AAudioRecorder::EnableBuiltInAEC(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInAEC: " << enable;
+ RTC_LOG(LS_ERROR) << "Not implemented";
+ return -1;
+}
+
+int AAudioRecorder::EnableBuiltInNS(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInNS: " << enable;
+ RTC_LOG(LS_ERROR) << "Not implemented";
+ return -1;
+}
+
+void AAudioRecorder::OnErrorCallback(aaudio_result_t error) {
+ RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error);
+ // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+ if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) {
+ // The stream is disconnected and any attempt to use it will return
+ // AAUDIO_ERROR_DISCONNECTED.
+ RTC_LOG(LS_WARNING) << "Input stream disconnected => restart is required";
+ // AAudio documentation states: "You should not close or reopen the stream
+ // from the callback, use another thread instead". A message is therefore
+ // sent to the main thread to do the restart operation.
+ RTC_DCHECK(main_thread_);
+ main_thread_->Post(RTC_FROM_HERE, this, kMessageInputStreamDisconnected);
+ }
+}
+
+// Read and process `num_frames` of data from the `audio_data` buffer.
+// TODO(henrika): possibly add trace here to be included in systrace.
+// See https://developer.android.com/studio/profile/systrace-commandline.html.
+aaudio_data_callback_result_t AAudioRecorder::OnDataCallback(
+ void* audio_data,
+ int32_t num_frames) {
+ // TODO(henrika): figure out why we sometimes hit this one.
+ // RTC_DCHECK(thread_checker_aaudio_.IsCurrent());
+ // RTC_LOG(LS_INFO) << "OnDataCallback: " << num_frames;
+ // Drain the input buffer at first callback to ensure that it does not
+ // contain any old data. Will also ensure that the lowest possible latency
+ // is obtained.
+ if (first_data_callback_) {
+ RTC_LOG(LS_INFO) << "--- First input data callback: "
+ "device id="
+ << aaudio_.device_id();
+ aaudio_.ClearInputStream(audio_data, num_frames);
+ first_data_callback_ = false;
+ }
+ // Check if the overflow counter has increased and if so log a warning.
+ // TODO(henrika): possible add UMA stat or capacity extension.
+ const int32_t overflow_count = aaudio_.xrun_count();
+ if (overflow_count > overflow_count_) {
+ RTC_LOG(LS_ERROR) << "Overflow detected: " << overflow_count;
+ overflow_count_ = overflow_count;
+ }
+ // Estimated time between when an audio frame was recorded by the input
+ // device and when it can be read from the input stream.
+ latency_millis_ = aaudio_.EstimateLatencyMillis();
+ // TODO(henrika): use for development only.
+ if (aaudio_.frames_read() % (1000 * aaudio_.frames_per_burst()) == 0) {
+ RTC_DLOG(LS_INFO) << "input latency: " << latency_millis_
+ << ", num_frames: " << num_frames;
+ }
+ // Copy recorded audio in `audio_data` to the WebRTC sink using the
+ // FineAudioBuffer object.
+ fine_audio_buffer_->DeliverRecordedData(
+ rtc::MakeArrayView(static_cast<const int16_t*>(audio_data),
+ aaudio_.samples_per_frame() * num_frames),
+ static_cast<int>(latency_millis_ + 0.5));
+
+ return AAUDIO_CALLBACK_RESULT_CONTINUE;
+}
+
+void AAudioRecorder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ switch (msg->message_id) {
+ case kMessageInputStreamDisconnected:
+ HandleStreamDisconnected();
+ break;
+ default:
+ RTC_LOG(LS_ERROR) << "Invalid message id: " << msg->message_id;
+ break;
+ }
+}
+
+void AAudioRecorder::HandleStreamDisconnected() {
+ RTC_DCHECK_RUN_ON(&thread_checker_);
+ RTC_LOG(LS_INFO) << "HandleStreamDisconnected";
+ if (!initialized_ || !recording_) {
+ return;
+ }
+ // Perform a restart by first closing the disconnected stream and then start
+ // a new stream; this time using the new (preferred) audio input device.
+ // TODO(henrika): resolve issue where a one restart attempt leads to a long
+ // sequence of new calls to OnErrorCallback().
+ // See b/73148976 for details.
+ StopRecording();
+ InitRecording();
+ StartRecording();
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
new file mode 100644
index 0000000000..a911577bfe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_recorder.h
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
+
+#include <aaudio/AAudio.h>
+
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "rtc_base/message_handler.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+class AudioDeviceBuffer;
+
+namespace jni {
+
+// Implements low-latency 16-bit mono PCM audio input support for Android
+// using the C-based AAudio API.
+//
+// An instance must be created and destroyed on one and the same thread.
+// All public methods must also be called on the same thread. A thread checker
+// will RTC_DCHECK if any method is called on an invalid thread. Audio buffers
+// are delivered on a dedicated high-priority thread owned by AAudio.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// TODO(henrika): add comments about device changes and adaptive buffer
+// management.
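+//
+// Usage sketch (illustrative only; mirrors the player, including the rule
+// that InitRecording() must be called again after StopRecording()):
+//   AAudioRecorder recorder(audio_parameters);
+//   recorder.Init();
+//   recorder.AttachAudioBuffer(adb);  // Non-null AudioDeviceBuffer.
+//   recorder.InitRecording();
+//   recorder.StartRecording();
+//   ...
+//   recorder.StopRecording();
+//   recorder.Terminate();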
+class AAudioRecorder : public AudioInput,
+ public AAudioObserverInterface,
+ public rtc::MessageHandler {
+ public:
+ explicit AAudioRecorder(const AudioParameters& audio_parameters);
+ ~AAudioRecorder() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int StartRecording() override;
+ int StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // TODO(henrika): add support using AAudio APIs when available.
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+ int EnableBuiltInAEC(bool enable) override;
+ int EnableBuiltInNS(bool enable) override;
+
+ protected:
+ // AAudioObserverInterface implementation.
+
+ // For an input stream, this function should read `num_frames` of recorded
+ // data, in the stream's current data format, from the `audio_data` buffer.
+ // Called on a real-time thread owned by AAudio.
+ aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) override;
+
+ // AAudio calls this function if any error occurs on a callback thread.
+ // Called on a real-time thread owned by AAudio.
+ void OnErrorCallback(aaudio_result_t error) override;
+
+ // rtc::MessageHandler used for restart messages.
+ void OnMessage(rtc::Message* msg) override;
+
+ private:
+ // Closes the existing stream and starts a new stream.
+ void HandleStreamDisconnected();
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to AAudioRecorder::OnDataCallback from a
+ // real-time thread owned by AAudio. Detached during construction of this
+ // object.
+ SequenceChecker thread_checker_aaudio_;
+
+ // The thread on which this object is created.
+ rtc::Thread* main_thread_;
+
+ // Wraps all AAudio resources. Contains an input stream using the default
+ // input audio device.
+ AAudioWrapper aaudio_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_ = nullptr;
+
+ bool initialized_ = false;
+ bool recording_ = false;
+
+ // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms
+ // chunks of audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Counts number of detected overflow events reported by AAudio.
+ int32_t overflow_count_ = 0;
+
+ // Estimated time between when an audio frame was recorded by the input
+ // device and when it can be read from the input stream.
+ double latency_millis_ = 0;
+
+ // True only for the first data callback in each audio session.
+ bool first_data_callback_ = true;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_RECORDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
new file mode 100644
index 0000000000..6c20703108
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.cc
@@ -0,0 +1,501 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
+
+#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/time_utils.h"
+
+#define LOG_ON_ERROR(op) \
+ do { \
+ aaudio_result_t result = (op); \
+ if (result != AAUDIO_OK) { \
+ RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \
+ } \
+ } while (0)
+
+#define RETURN_ON_ERROR(op, ...) \
+ do { \
+ aaudio_result_t result = (op); \
+ if (result != AAUDIO_OK) { \
+ RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \
+ return __VA_ARGS__; \
+ } \
+ } while (0)
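+
+// Usage note (illustrative): RETURN_ON_ERROR(AAudioStream_requestStart(s),
+// false) logs the failed result as text and makes the enclosing function
+// return false, while LOG_ON_ERROR(op) only logs the failure and continues.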
+
+namespace webrtc {
+
+namespace jni {
+
+namespace {
+
+const char* DirectionToString(aaudio_direction_t direction) {
+ switch (direction) {
+ case AAUDIO_DIRECTION_OUTPUT:
+ return "OUTPUT";
+ case AAUDIO_DIRECTION_INPUT:
+ return "INPUT";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* SharingModeToString(aaudio_sharing_mode_t mode) {
+ switch (mode) {
+ case AAUDIO_SHARING_MODE_EXCLUSIVE:
+ return "EXCLUSIVE";
+ case AAUDIO_SHARING_MODE_SHARED:
+ return "SHARED";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* PerformanceModeToString(aaudio_performance_mode_t mode) {
+ switch (mode) {
+ case AAUDIO_PERFORMANCE_MODE_NONE:
+ return "NONE";
+ case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
+ return "POWER_SAVING";
+ case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+ return "LOW_LATENCY";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+const char* FormatToString(int32_t id) {
+ switch (id) {
+ case AAUDIO_FORMAT_INVALID:
+ return "INVALID";
+ case AAUDIO_FORMAT_UNSPECIFIED:
+ return "UNSPECIFIED";
+ case AAUDIO_FORMAT_PCM_I16:
+ return "PCM_I16";
+ case AAUDIO_FORMAT_PCM_FLOAT:
+ return "FLOAT";
+ default:
+ return "UNKNOWN";
+ }
+}
+
+void ErrorCallback(AAudioStream* stream,
+ void* user_data,
+ aaudio_result_t error) {
+ RTC_DCHECK(user_data);
+ AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data);
+ RTC_LOG(LS_WARNING) << "ErrorCallback: "
+ << DirectionToString(aaudio_wrapper->direction());
+ RTC_DCHECK(aaudio_wrapper->observer());
+ aaudio_wrapper->observer()->OnErrorCallback(error);
+}
+
+aaudio_data_callback_result_t DataCallback(AAudioStream* stream,
+ void* user_data,
+ void* audio_data,
+ int32_t num_frames) {
+ RTC_DCHECK(user_data);
+ RTC_DCHECK(audio_data);
+ AAudioWrapper* aaudio_wrapper = reinterpret_cast<AAudioWrapper*>(user_data);
+ RTC_DCHECK(aaudio_wrapper->observer());
+ return aaudio_wrapper->observer()->OnDataCallback(audio_data, num_frames);
+}
+
+// Wraps the stream builder object to ensure that it is released properly when
+// this wrapper object goes out of scope.
+class ScopedStreamBuilder {
+ public:
+ ScopedStreamBuilder() {
+ LOG_ON_ERROR(AAudio_createStreamBuilder(&builder_));
+ RTC_DCHECK(builder_);
+ }
+ ~ScopedStreamBuilder() {
+ if (builder_) {
+ LOG_ON_ERROR(AAudioStreamBuilder_delete(builder_));
+ }
+ }
+
+ AAudioStreamBuilder* get() const { return builder_; }
+
+ private:
+ AAudioStreamBuilder* builder_ = nullptr;
+};
+
+} // namespace
+
+AAudioWrapper::AAudioWrapper(const AudioParameters& audio_parameters,
+ aaudio_direction_t direction,
+ AAudioObserverInterface* observer)
+ : audio_parameters_(audio_parameters),
+ direction_(direction),
+ observer_(observer) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(observer_);
+ aaudio_thread_checker_.Detach();
+ RTC_LOG(LS_INFO) << audio_parameters_.ToString();
+}
+
+AAudioWrapper::~AAudioWrapper() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!stream_);
+}
+
+bool AAudioWrapper::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Creates a stream builder which can be used to open an audio stream.
+ ScopedStreamBuilder builder;
+ // Configures the stream builder using audio parameters given at construction.
+ SetStreamConfiguration(builder.get());
+ // Opens a stream based on options in the stream builder.
+ if (!OpenStream(builder.get())) {
+ return false;
+ }
+ // Ensures that the opened stream could activate the requested settings.
+ if (!VerifyStreamConfiguration()) {
+ return false;
+ }
+ // Optimizes the buffer scheme for lowest possible latency and creates
+ // additional buffer logic to match the 10ms buffer size used in WebRTC.
+ if (!OptimizeBuffers()) {
+ return false;
+ }
+ LogStreamState();
+ return true;
+}
+
+bool AAudioWrapper::Start() {
+ RTC_LOG(LS_INFO) << "Start";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // TODO(henrika): this state check might not be needed.
+ aaudio_stream_state_t current_state = AAudioStream_getState(stream_);
+ if (current_state != AAUDIO_STREAM_STATE_OPEN) {
+ RTC_LOG(LS_ERROR) << "Invalid state: "
+ << AAudio_convertStreamStateToText(current_state);
+ return false;
+ }
+ // Asynchronous request for the stream to start.
+ RETURN_ON_ERROR(AAudioStream_requestStart(stream_), false);
+ LogStreamState();
+ return true;
+}
+
+bool AAudioWrapper::Stop() {
+ RTC_LOG(LS_INFO) << "Stop: " << DirectionToString(direction());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Asynchronous request for the stream to stop.
+ RETURN_ON_ERROR(AAudioStream_requestStop(stream_), false);
+ CloseStream();
+ aaudio_thread_checker_.Detach();
+ return true;
+}
+
+double AAudioWrapper::EstimateLatencyMillis() const {
+ RTC_DCHECK(stream_);
+ double latency_millis = 0.0;
+ if (direction() == AAUDIO_DIRECTION_INPUT) {
+ // For input streams, the best estimate we can make is to use the current
+ // burst size as the delay estimate.
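+ // Example with hypothetical numbers: 192 frames per burst at 48000 Hz
+ // gives 192.0 / 48000 * 1000 = 4 ms of estimated input latency.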
+ latency_millis = static_cast<double>(frames_per_burst()) / sample_rate() *
+ rtc::kNumMillisecsPerSec;
+ } else {
+ int64_t existing_frame_index;
+ int64_t existing_frame_presentation_time;
+ // Get the time at which a particular frame was presented to audio hardware.
+ aaudio_result_t result = AAudioStream_getTimestamp(
+ stream_, CLOCK_MONOTONIC, &existing_frame_index,
+ &existing_frame_presentation_time);
+ // Results are only valid when the stream is in AAUDIO_STREAM_STATE_STARTED.
+ if (result == AAUDIO_OK) {
+ // Get write index for next audio frame.
+ int64_t next_frame_index = frames_written();
+ // Number of frames between next frame and the existing frame.
+ int64_t frame_index_delta = next_frame_index - existing_frame_index;
+ // Assume the next frame will be written now.
+ int64_t next_frame_write_time = rtc::TimeNanos();
+ // Calculate time when next frame will be presented to the hardware taking
+ // sample rate into account.
+ int64_t frame_time_delta =
+ (frame_index_delta * rtc::kNumNanosecsPerSec) / sample_rate();
+ int64_t next_frame_presentation_time =
+ existing_frame_presentation_time + frame_time_delta;
+ // Derive a latency estimate given results above.
+ latency_millis = static_cast<double>(next_frame_presentation_time -
+ next_frame_write_time) /
+ rtc::kNumNanosecsPerMillisec;
+ }
+ }
+ return latency_millis;
+}
+
+// Returns true if the buffer size was increased by one burst; returns false
+// if the new size would exceed the maximum buffer capacity or if AAudio
+// rejects the new size.
+bool AAudioWrapper::IncreaseOutputBufferSize() {
+ RTC_LOG(LS_INFO) << "IncreaseBufferSize";
+ RTC_DCHECK(stream_);
+ RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+ RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_OUTPUT);
+ aaudio_result_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+ // Try to increase the buffer size by one burst to reduce the risk of
+ // underruns.
+ buffer_size += frames_per_burst();
+ // Verify that the new buffer size is not larger than max capacity.
+ // TODO(henrika): keep track of case when we reach the capacity limit.
+ const int32_t max_buffer_size = buffer_capacity_in_frames();
+ if (buffer_size > max_buffer_size) {
+ RTC_LOG(LS_ERROR) << "Required buffer size (" << buffer_size
+ << ") is higher than max: " << max_buffer_size;
+ return false;
+ }
+ RTC_LOG(LS_INFO) << "Updating buffer size to: " << buffer_size
+ << " (max=" << max_buffer_size << ")";
+ buffer_size = AAudioStream_setBufferSizeInFrames(stream_, buffer_size);
+ if (buffer_size < 0) {
+ RTC_LOG(LS_ERROR) << "Failed to change buffer size: "
+ << AAudio_convertResultToText(buffer_size);
+ return false;
+ }
+ RTC_LOG(LS_INFO) << "Buffer size changed to: " << buffer_size;
+ return true;
+}
+
+void AAudioWrapper::ClearInputStream(void* audio_data, int32_t num_frames) {
+ RTC_LOG(LS_INFO) << "ClearInputStream";
+ RTC_DCHECK(stream_);
+ RTC_DCHECK(aaudio_thread_checker_.IsCurrent());
+ RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_INPUT);
+ aaudio_result_t cleared_frames = 0;
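+ // A timeout of zero makes AAudioStream_read() non-blocking; keep reading
+ // until no more frames are returned and the stream is drained.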
+ do {
+ cleared_frames = AAudioStream_read(stream_, audio_data, num_frames, 0);
+ } while (cleared_frames > 0);
+}
+
+AAudioObserverInterface* AAudioWrapper::observer() const {
+ return observer_;
+}
+
+AudioParameters AAudioWrapper::audio_parameters() const {
+ return audio_parameters_;
+}
+
+int32_t AAudioWrapper::samples_per_frame() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSamplesPerFrame(stream_);
+}
+
+int32_t AAudioWrapper::buffer_size_in_frames() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getBufferSizeInFrames(stream_);
+}
+
+int32_t AAudioWrapper::buffer_capacity_in_frames() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getBufferCapacityInFrames(stream_);
+}
+
+int32_t AAudioWrapper::device_id() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getDeviceId(stream_);
+}
+
+int32_t AAudioWrapper::xrun_count() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getXRunCount(stream_);
+}
+
+int32_t AAudioWrapper::format() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFormat(stream_);
+}
+
+int32_t AAudioWrapper::sample_rate() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSampleRate(stream_);
+}
+
+int32_t AAudioWrapper::channel_count() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getChannelCount(stream_);
+}
+
+int32_t AAudioWrapper::frames_per_callback() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesPerDataCallback(stream_);
+}
+
+aaudio_sharing_mode_t AAudioWrapper::sharing_mode() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getSharingMode(stream_);
+}
+
+aaudio_performance_mode_t AAudioWrapper::performance_mode() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getPerformanceMode(stream_);
+}
+
+aaudio_stream_state_t AAudioWrapper::stream_state() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getState(stream_);
+}
+
+int64_t AAudioWrapper::frames_written() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesWritten(stream_);
+}
+
+int64_t AAudioWrapper::frames_read() const {
+ RTC_DCHECK(stream_);
+ return AAudioStream_getFramesRead(stream_);
+}
+
+void AAudioWrapper::SetStreamConfiguration(AAudioStreamBuilder* builder) {
+ RTC_LOG(LS_INFO) << "SetStreamConfiguration";
+ RTC_DCHECK(builder);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Request usage of default primary output/input device.
+ // TODO(henrika): verify that default device follows Java APIs.
+ // https://developer.android.com/reference/android/media/AudioDeviceInfo.html.
+ AAudioStreamBuilder_setDeviceId(builder, AAUDIO_UNSPECIFIED);
+ // Use preferred sample rate given by the audio parameters.
+ AAudioStreamBuilder_setSampleRate(builder, audio_parameters().sample_rate());
+ // Use preferred channel configuration given by the audio parameters.
+ AAudioStreamBuilder_setChannelCount(builder, audio_parameters().channels());
+ // Always use 16-bit PCM audio sample format.
+ AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I16);
+ // TODO(henrika): investigate effect of using AAUDIO_SHARING_MODE_EXCLUSIVE.
+ // Exclusive mode would give the lowest possible latency, but shared mode is
+ // requested here until the effect of exclusive mode has been evaluated.
+ AAudioStreamBuilder_setSharingMode(builder, AAUDIO_SHARING_MODE_SHARED);
+ // Use the direction that was given at construction.
+ AAudioStreamBuilder_setDirection(builder, direction_);
+ // TODO(henrika): investigate performance using different performance modes.
+ AAudioStreamBuilder_setPerformanceMode(builder,
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY);
+ // Given that WebRTC applications require low latency, our audio stream uses
+ // an asynchronous callback function to transfer data to and from the
+ // application. AAudio executes the callback in a higher-priority thread that
+ // has better performance.
+ AAudioStreamBuilder_setDataCallback(builder, DataCallback, this);
+ // Request that AAudio calls this function if any error occurs on a callback
+ // thread.
+ AAudioStreamBuilder_setErrorCallback(builder, ErrorCallback, this);
+}
+
+bool AAudioWrapper::OpenStream(AAudioStreamBuilder* builder) {
+ RTC_LOG(LS_INFO) << "OpenStream";
+ RTC_DCHECK(builder);
+ AAudioStream* stream = nullptr;
+ RETURN_ON_ERROR(AAudioStreamBuilder_openStream(builder, &stream), false);
+ stream_ = stream;
+ LogStreamConfiguration();
+ return true;
+}
+
+void AAudioWrapper::CloseStream() {
+ RTC_LOG(LS_INFO) << "CloseStream";
+ RTC_DCHECK(stream_);
+ LOG_ON_ERROR(AAudioStream_close(stream_));
+ stream_ = nullptr;
+}
+
+void AAudioWrapper::LogStreamConfiguration() {
+ RTC_DCHECK(stream_);
+ char ss_buf[1024];
+ rtc::SimpleStringBuilder ss(ss_buf);
+ ss << "Stream Configuration: ";
+ ss << "sample rate=" << sample_rate() << ", channels=" << channel_count();
+ ss << ", samples per frame=" << samples_per_frame();
+ ss << ", format=" << FormatToString(format());
+ ss << ", sharing mode=" << SharingModeToString(sharing_mode());
+ ss << ", performance mode=" << PerformanceModeToString(performance_mode());
+ ss << ", direction=" << DirectionToString(direction());
+ ss << ", device id=" << AAudioStream_getDeviceId(stream_);
+ ss << ", frames per callback=" << frames_per_callback();
+ RTC_LOG(LS_INFO) << ss.str();
+}
+
+void AAudioWrapper::LogStreamState() {
+ RTC_LOG(LS_INFO) << "AAudio stream state: "
+ << AAudio_convertStreamStateToText(stream_state());
+}
+
+bool AAudioWrapper::VerifyStreamConfiguration() {
+ RTC_LOG(LS_INFO) << "VerifyStreamConfiguration";
+ RTC_DCHECK(stream_);
+ // TODO(henrika): should we verify device ID as well?
+ if (AAudioStream_getSampleRate(stream_) != audio_parameters().sample_rate()) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested sample rate";
+ return false;
+ }
+ if (AAudioStream_getChannelCount(stream_) !=
+ static_cast<int32_t>(audio_parameters().channels())) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested channel count";
+ return false;
+ }
+ if (AAudioStream_getFormat(stream_) != AAUDIO_FORMAT_PCM_I16) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested format";
+ return false;
+ }
+ if (AAudioStream_getSharingMode(stream_) != AAUDIO_SHARING_MODE_SHARED) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested sharing mode";
+ return false;
+ }
+ if (AAudioStream_getPerformanceMode(stream_) !=
+ AAUDIO_PERFORMANCE_MODE_LOW_LATENCY) {
+ RTC_LOG(LS_ERROR) << "Stream unable to use requested performance mode";
+ return false;
+ }
+ if (AAudioStream_getDirection(stream_) != direction()) {
+ RTC_LOG(LS_ERROR) << "Stream direction could not be set";
+ return false;
+ }
+ if (AAudioStream_getSamplesPerFrame(stream_) !=
+ static_cast<int32_t>(audio_parameters().channels())) {
+ RTC_LOG(LS_ERROR) << "Invalid number of samples per frame";
+ return false;
+ }
+ return true;
+}
+
+bool AAudioWrapper::OptimizeBuffers() {
+ RTC_LOG(LS_INFO) << "OptimizeBuffers";
+ RTC_DCHECK(stream_);
+ // Maximum number of frames that can be filled without blocking.
+ RTC_LOG(LS_INFO) << "max buffer capacity in frames: "
+ << buffer_capacity_in_frames();
+ // Query the number of frames that the application should read or write at
+ // one time for optimal performance.
+ int32_t frames_per_burst = AAudioStream_getFramesPerBurst(stream_);
+ RTC_LOG(LS_INFO) << "frames per burst for optimal performance: "
+ << frames_per_burst;
+ frames_per_burst_ = frames_per_burst;
+ if (direction() == AAUDIO_DIRECTION_INPUT) {
+ // There is no point in calling setBufferSizeInFrames() for input streams
+ // since it has no effect on the performance (latency in this case).
+ return true;
+ }
+ // Set buffer size to same as burst size to guarantee lowest possible latency.
+ // This size might change for output streams if underruns are detected and
+ // automatic buffer adjustment is enabled.
+ AAudioStream_setBufferSizeInFrames(stream_, frames_per_burst);
+ int32_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_);
+ if (buffer_size != frames_per_burst) {
+ RTC_LOG(LS_ERROR) << "Failed to use optimal buffer burst size";
+ return false;
+ }
+ // Current buffer size, now set to equal one burst.
+ RTC_LOG(LS_INFO) << "buffer burst size in frames: " << buffer_size;
+ return true;
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
new file mode 100644
index 0000000000..cbc78a0a25
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/aaudio_wrapper.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
+
+#include <aaudio/AAudio.h>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// AAudio callback interface for audio transport to/from the AAudio stream.
+// The interface also contains an error callback method for notifications of
+// e.g. device changes.
+class AAudioObserverInterface {
+ public:
+ // Audio data will be passed in or out of this function depending on the
+ // direction of the audio stream. This callback function will be called on a
+ // real-time thread owned by AAudio.
+ virtual aaudio_data_callback_result_t OnDataCallback(void* audio_data,
+ int32_t num_frames) = 0;
+ // AAudio will call this function if any error occurs on a callback thread.
+ // In response, this function could signal or launch another thread to reopen
+ // a stream on another device. Do not reopen the stream in this callback.
+ virtual void OnErrorCallback(aaudio_result_t error) = 0;
+
+ protected:
+ virtual ~AAudioObserverInterface() {}
+};
+
+// Utility class which wraps the C-based AAudio API into a more handy C++ class
+// where the underlying resources (AAudioStreamBuilder and AAudioStream) are
+// encapsulated. User must set the direction (in or out) at construction since
+// it defines the stream type and the direction of the data flow in the
+// AAudioObserverInterface.
+//
+// AAudio is a new Android C API introduced in the Android O (26) release.
+// It is designed for high-performance audio applications that require low
+// latency. Applications communicate with AAudio by reading and writing data
+// to streams.
+//
+// Each stream is attached to a single audio device, where each audio device
+// has a unique ID. The ID can be used to bind an audio stream to a specific
+// audio device but this implementation lets AAudio choose the default primary
+// device instead (device selection takes place in Java). A stream can only
+// move data in one direction. When a stream is opened, Android checks to
+// ensure that the audio device and stream direction agree.
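+//
+// Usage sketch (illustrative; this is how AAudioPlayer and AAudioRecorder
+// drive the wrapper, passing themselves as the observer):
+//   AAudioWrapper aaudio(audio_parameters, AAUDIO_DIRECTION_OUTPUT, observer);
+//   if (aaudio.Init() && aaudio.Start()) {
+//     // Audio now flows via AAudioObserverInterface::OnDataCallback().
+//   }
+//   aaudio.Stop();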
+class AAudioWrapper {
+ public:
+ AAudioWrapper(const AudioParameters& audio_parameters,
+ aaudio_direction_t direction,
+ AAudioObserverInterface* observer);
+ ~AAudioWrapper();
+
+ bool Init();
+ bool Start();
+ bool Stop();
+
+ // For output streams: estimates latency between writing an audio frame to
+ // the output stream and the time that same frame is played out on the output
+ // audio device.
+ // For input streams: estimates latency between reading an audio frame from
+ // the input stream and the time that same frame was recorded on the input
+ // audio device.
+ double EstimateLatencyMillis() const;
+
+ // Increases the internal buffer size for output streams by one burst size to
+ // reduce the risk of underruns. Can be used while a stream is active.
+ bool IncreaseOutputBufferSize();
+
+ // Drains the recording stream of any existing data by reading from it until
+ // it's empty. Can be used to clear out old data before starting a new audio
+ // session.
+ void ClearInputStream(void* audio_data, int32_t num_frames);
+
+ AAudioObserverInterface* observer() const;
+ AudioParameters audio_parameters() const;
+ int32_t samples_per_frame() const;
+ int32_t buffer_size_in_frames() const;
+ int32_t buffer_capacity_in_frames() const;
+ int32_t device_id() const;
+ int32_t xrun_count() const;
+ int32_t format() const;
+ int32_t sample_rate() const;
+ int32_t channel_count() const;
+ int32_t frames_per_callback() const;
+ aaudio_sharing_mode_t sharing_mode() const;
+ aaudio_performance_mode_t performance_mode() const;
+ aaudio_stream_state_t stream_state() const;
+ int64_t frames_written() const;
+ int64_t frames_read() const;
+ aaudio_direction_t direction() const { return direction_; }
+ AAudioStream* stream() const { return stream_; }
+ int32_t frames_per_burst() const { return frames_per_burst_; }
+
+ private:
+ void SetStreamConfiguration(AAudioStreamBuilder* builder);
+ bool OpenStream(AAudioStreamBuilder* builder);
+ void CloseStream();
+ void LogStreamConfiguration();
+ void LogStreamState();
+ bool VerifyStreamConfiguration();
+ bool OptimizeBuffers();
+
+ SequenceChecker thread_checker_;
+ SequenceChecker aaudio_thread_checker_;
+ const AudioParameters audio_parameters_;
+ const aaudio_direction_t direction_;
+ AAudioObserverInterface* observer_ = nullptr;
+ AAudioStream* stream_ = nullptr;
+ int32_t frames_per_burst_ = 0;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AAUDIO_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h
new file mode 100644
index 0000000000..fdecf384c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_common.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
+
+namespace webrtc {
+
+namespace jni {
+
+const int kDefaultSampleRate = 44100;
+// Delay estimates for the two different supported modes. These values are based
+// on real-time round-trip delay estimates on a large set of devices and they
+// are lower bounds since the filter length is 128 ms, so the AEC works for
+// delays in the range [50, ~170] ms and [150, ~270] ms. Note that, in most
+// cases, the lowest delay estimate will not be utilized since devices that
+// support low-latency output audio often support HW AEC as well.
+const int kLowLatencyModeDelayEstimateInMilliseconds = 50;
+const int kHighLatencyModeDelayEstimateInMilliseconds = 150;
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_COMMON_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc
new file mode 100644
index 0000000000..7c59d3e432
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.cc
@@ -0,0 +1,650 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+#include <memory>
+#include <utility>
+
+#include "api/make_ref_counted.h"
+#include "api/sequence_checker.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_audio_device_module_base_jni/WebRtcAudioManager_jni.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// This class combines a generic instance of an AudioInput and a generic
+// instance of an AudioOutput to create an AudioDeviceModule. This is mostly
+// done by delegating to the audio input/output with some glue code. This class
+// also directly implements some of the AudioDeviceModule methods with dummy
+// implementations.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread.
+// A thread checker will RTC_DCHECK if any method is called on an invalid
+// thread.
+// TODO(henrika): it might be useful to also support a scenario where the ADM
+// is constructed on thread T1, used on thread T2 and destructed on T2 or T3.
+// If so, care must be taken to ensure that only T2 is a COM thread.
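+//
+// Construction sketch (hypothetical values; in practice the module is
+// assembled from a platform-specific AudioInput/AudioOutput pair rather
+// than constructed directly):
+//   auto adm = rtc::make_ref_counted<AndroidAudioDeviceModule>(
+//       AudioDeviceModule::kAndroidJavaAudio,
+//       /*is_stereo_playout_supported=*/false,
+//       /*is_stereo_record_supported=*/false,
+//       /*playout_delay_ms=*/kHighLatencyModeDelayEstimateInMilliseconds,
+//       std::move(audio_input), std::move(audio_output));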
+class AndroidAudioDeviceModule : public AudioDeviceModule {
+ public:
+ // For use with UMA logging. Must be kept in sync with histograms.xml in
+ // Chrome, located at
+ // https://cs.chromium.org/chromium/src/tools/metrics/histograms/histograms.xml
+ enum class InitStatus {
+ OK = 0,
+ PLAYOUT_ERROR = 1,
+ RECORDING_ERROR = 2,
+ OTHER_ERROR = 3,
+ NUM_STATUSES = 4
+ };
+
+ AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output)
+ : audio_layer_(audio_layer),
+ is_stereo_playout_supported_(is_stereo_playout_supported),
+ is_stereo_record_supported_(is_stereo_record_supported),
+ playout_delay_ms_(playout_delay_ms),
+ task_queue_factory_(CreateDefaultTaskQueueFactory()),
+ input_(std::move(audio_input)),
+ output_(std::move(audio_output)),
+ initialized_(false) {
+ RTC_CHECK(input_);
+ RTC_CHECK(output_);
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ thread_checker_.Detach();
+ }
+
+ ~AndroidAudioDeviceModule() override { RTC_DLOG(LS_INFO) << __FUNCTION__; }
+
+ int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer* audioLayer) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *audioLayer = audio_layer_;
+ return 0;
+ }
+
+ int32_t RegisterAudioCallback(AudioTransport* audioCallback) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return audio_device_buffer_->RegisterAudioCallback(audioCallback);
+ }
+
+ int32_t Init() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ =
+ std::make_unique<AudioDeviceBuffer>(task_queue_factory_.get());
+ AttachAudioBuffer();
+ if (initialized_) {
+ return 0;
+ }
+ InitStatus status;
+ if (output_->Init() != 0) {
+ status = InitStatus::PLAYOUT_ERROR;
+ } else if (input_->Init() != 0) {
+ output_->Terminate();
+ status = InitStatus::RECORDING_ERROR;
+ } else {
+ initialized_ = true;
+ status = InitStatus::OK;
+ }
+ RTC_HISTOGRAM_ENUMERATION("WebRTC.Audio.InitializationResult",
+ static_cast<int>(status),
+ static_cast<int>(InitStatus::NUM_STATUSES));
+ if (status != InitStatus::OK) {
+ RTC_LOG(LS_ERROR) << "Audio device initialization failed.";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t Terminate() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return 0;
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ int32_t err = input_->Terminate();
+ err |= output_->Terminate();
+ initialized_ = false;
+ thread_checker_.Detach();
+ audio_device_buffer_.reset(nullptr);
+ RTC_DCHECK_EQ(err, 0);
+ return err;
+ }
+
+ bool Initialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << ":" << initialized_;
+ return initialized_;
+ }
+
+ int16_t PlayoutDevices() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_LOG(LS_INFO) << "output: " << 1;
+ return 1;
+ }
+
+ int16_t RecordingDevices() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DLOG(LS_INFO) << "output: " << 1;
+ return 1;
+ }
+
+ int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetPlayoutDevice(uint16_t index) override {
+ // OK to use but it has no effect currently since device selection is
+ // done using Android APIs instead.
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ return 0;
+ }
+
+ int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetRecordingDevice(uint16_t index) override {
+ // OK to use but it has no effect currently since device selection is
+ // done using Android APIs instead.
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")";
+ return 0;
+ }
+
+ int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) override {
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t PlayoutIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = true;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t InitPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (PlayoutIsInitialized()) {
+ return 0;
+ }
+ int32_t result = output_->InitPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool PlayoutIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return output_->PlayoutIsInitialized();
+ }
+
+ int32_t RecordingIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = true;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t InitRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (RecordingIsInitialized()) {
+ return 0;
+ }
+ int32_t result = input_->InitRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool RecordingIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return input_->RecordingIsInitialized();
+ }
+
+ int32_t StartPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (Playing()) {
+ return 0;
+ }
+ int32_t result = output_->StartPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess",
+ static_cast<int>(result == 0));
+ if (result == 0) {
+ // Only start playing the audio device buffer if starting the audio
+ // output succeeded.
+ audio_device_buffer_->StartPlayout();
+ }
+ return result;
+ }
+
+ int32_t StopPlayout() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (!Playing())
+ return 0;
+ RTC_LOG(LS_INFO) << __FUNCTION__;
+ audio_device_buffer_->StopPlayout();
+ int32_t result = output_->StopPlayout();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool Playing() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return output_->Playing();
+ }
+
+ int32_t StartRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (Recording()) {
+ return 0;
+ }
+ int32_t result = input_->StartRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess",
+ static_cast<int>(result == 0));
+ if (result == 0) {
+ // Only start recording the audio device buffer if starting the audio
+ // input succeeded.
+ audio_device_buffer_->StartRecording();
+ }
+ return result;
+ }
+
+ int32_t StopRecording() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ if (!Recording())
+ return 0;
+ audio_device_buffer_->StopRecording();
+ int32_t result = input_->StopRecording();
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess",
+ static_cast<int>(result == 0));
+ return result;
+ }
+
+ bool Recording() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return input_->Recording();
+ }
+
+ int32_t InitSpeaker() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_ ? 0 : -1;
+ }
+
+ bool SpeakerIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_;
+ }
+
+ int32_t InitMicrophone() override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_ ? 0 : -1;
+ }
+
+ bool MicrophoneIsInitialized() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return initialized_;
+ }
+
+ int32_t SpeakerVolumeIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ *available = output_->SpeakerVolumeIsAvailable();
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetSpeakerVolume(uint32_t volume) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ return output_->SetSpeakerVolume(volume);
+ }
+
+ int32_t SpeakerVolume(uint32_t* output_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> volume = output_->SpeakerVolume();
+ if (!volume)
+ return -1;
+ *output_volume = *volume;
+ RTC_DLOG(LS_INFO) << "output: " << *volume;
+ return 0;
+ }
+
+ int32_t MaxSpeakerVolume(uint32_t* output_max_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
+ if (!max_volume)
+ return -1;
+ *output_max_volume = *max_volume;
+ return 0;
+ }
+
+ int32_t MinSpeakerVolume(uint32_t* output_min_volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return -1;
+ absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
+ if (!min_volume)
+ return -1;
+ *output_min_volume = *min_volume;
+ return 0;
+ }
+
+ int32_t MicrophoneVolumeIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = false;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return -1;
+ }
+
+ int32_t SetMicrophoneVolume(uint32_t volume) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneVolume(uint32_t* volume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MinMicrophoneVolume(uint32_t* minVolume) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SpeakerMuteIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetSpeakerMute(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SpeakerMute(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneMuteIsAvailable(bool* available) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t SetMicrophoneMute(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t MicrophoneMute(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_CHECK_NOTREACHED();
+ }
+
+ int32_t StereoPlayoutIsAvailable(bool* available) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = is_stereo_playout_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetStereoPlayout(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ // Android does not support switching between mono and stereo on the fly.
+ // Whether stereo or mono is used is determined by the audio layer. Calling
+ // this method is allowed as long as the requested state matches the
+ // current one.
+ bool available = is_stereo_playout_supported_;
+ if (enable != available) {
+ RTC_LOG(LS_WARNING) << "changing stereo playout not supported";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t StereoPlayout(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *enabled = is_stereo_playout_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *enabled;
+ return 0;
+ }
+
+ int32_t StereoRecordingIsAvailable(bool* available) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *available = is_stereo_record_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *available;
+ return 0;
+ }
+
+ int32_t SetStereoRecording(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ // Android does not support switching between mono and stereo on the fly.
+ // Whether stereo or mono is used is determined by the audio layer. Calling
+ // this method is allowed as long as the requested state matches the
+ // current one.
+ bool available = is_stereo_record_supported_;
+ if (enable != available) {
+ RTC_LOG(LS_WARNING) << "changing stereo recording not supported";
+ return -1;
+ }
+ return 0;
+ }
+
+ int32_t StereoRecording(bool* enabled) const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ *enabled = is_stereo_record_supported_;
+ RTC_DLOG(LS_INFO) << "output: " << *enabled;
+ return 0;
+ }
+
+ int32_t PlayoutDelay(uint16_t* delay_ms) const override {
+ // The best estimate available is half of the estimated total delay.
+ *delay_ms = playout_delay_ms_ / 2;
+ RTC_DCHECK_GT(*delay_ms, 0);
+ return 0;
+ }
+
+ // Returns true if the device supports built-in AEC and is not blocklisted.
+ // Currently, if OpenSL ES is used in both directions, this method will still
+ // report the correct value and it has the correct effect. As an example:
+ // a device supports built-in AEC and this method returns true. Libjingle
+ // will then disable the WebRTC-based AEC and that will work for all devices
+ // (mainly Nexus) even when OpenSL ES is used for input, since our current
+ // implementation enables built-in AEC by default also for OpenSL ES.
+ // The only "bad" thing that happens today is that when Libjingle calls
+ // OpenSLESRecorder::EnableBuiltInAEC(), it will have no real effect and a
+ // "Not Implemented" message will be logged. This imperfect state will remain
+ // until full support for audio effects based on OpenSL ES APIs has been
+ // added.
+ bool BuiltInAECIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return false;
+ bool isAvailable = input_->IsAcousticEchoCancelerSupported();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ // Not implemented for any input device on Android.
+ bool BuiltInAGCIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ RTC_DLOG(LS_INFO) << "output: " << false;
+ return false;
+ }
+
+ // Returns true if the device supports built-in NS and is not blocklisted.
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ // In addition, see comments for BuiltInAECIsAvailable().
+ bool BuiltInNSIsAvailable() const override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ if (!initialized_)
+ return false;
+ bool isAvailable = input_->IsNoiseSuppressorSupported();
+ RTC_DLOG(LS_INFO) << "output: " << isAvailable;
+ return isAvailable;
+ }
+
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ int32_t EnableBuiltInAEC(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ if (!initialized_)
+ return -1;
+ RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available";
+ int32_t result = input_->EnableBuiltInAEC(enable);
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ return result;
+ }
+
+ int32_t EnableBuiltInAGC(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ RTC_CHECK_NOTREACHED();
+ }
+
+ // TODO(henrika): add implementation for OpenSL ES based audio as well.
+ int32_t EnableBuiltInNS(bool enable) override {
+ RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")";
+ if (!initialized_)
+ return -1;
+ RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available";
+ int32_t result = input_->EnableBuiltInNS(enable);
+ RTC_DLOG(LS_INFO) << "output: " << result;
+ return result;
+ }
+
+ int32_t GetPlayoutUnderrunCount() const override {
+ if (!initialized_)
+ return -1;
+ return output_->GetPlayoutUnderrunCount();
+ }
+
+ int32_t AttachAudioBuffer() {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ output_->AttachAudioBuffer(audio_device_buffer_.get());
+ input_->AttachAudioBuffer(audio_device_buffer_.get());
+ return 0;
+ }
+
+ private:
+ SequenceChecker thread_checker_;
+
+ const AudioDeviceModule::AudioLayer audio_layer_;
+ const bool is_stereo_playout_supported_;
+ const bool is_stereo_record_supported_;
+ const uint16_t playout_delay_ms_;
+ const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
+ const std::unique_ptr<AudioInput> input_;
+ const std::unique_ptr<AudioOutput> output_;
+ std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
+
+ bool initialized_;
+};
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
+ const JavaRef<jobject>& j_context) {
+ return Java_WebRtcAudioManager_getAudioManager(env, j_context);
+}
+
+int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioManager_getSampleRate(env, j_audio_manager);
+}
+
+void GetAudioParameters(JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager,
+ int input_sample_rate,
+ int output_sample_rate,
+ bool use_stereo_input,
+ bool use_stereo_output,
+ AudioParameters* input_parameters,
+ AudioParameters* output_parameters) {
+ const int output_channels = use_stereo_output ? 2 : 1;
+ const int input_channels = use_stereo_input ? 2 : 1;
+ const size_t output_buffer_size = Java_WebRtcAudioManager_getOutputBufferSize(
+ env, j_context, j_audio_manager, output_sample_rate, output_channels);
+ const size_t input_buffer_size = Java_WebRtcAudioManager_getInputBufferSize(
+ env, j_context, j_audio_manager, input_sample_rate, input_channels);
+ output_parameters->reset(output_sample_rate,
+ static_cast<size_t>(output_channels),
+ static_cast<size_t>(output_buffer_size));
+ input_parameters->reset(input_sample_rate,
+ static_cast<size_t>(input_channels),
+ static_cast<size_t>(input_buffer_size));
+ RTC_CHECK(input_parameters->is_valid());
+ RTC_CHECK(output_parameters->is_valid());
+}
+
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output) {
+ RTC_DLOG(LS_INFO) << __FUNCTION__;
+ return rtc::make_ref_counted<AndroidAudioDeviceModule>(
+ audio_layer, is_stereo_playout_supported, is_stereo_record_supported,
+ playout_delay_ms, std::move(audio_input), std::move(audio_output));
+}
+
+} // namespace jni
+} // namespace webrtc
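For orientation, a minimal sketch of how the factory above is driven. MyAudioInput and MyAudioOutput are hypothetical stand-ins for concrete AudioInput/AudioOutput implementations (such as the JNI-backed ones added later in this patch), and 150 ms is an assumed delay estimate:

// Sketch only: MyAudioInput/MyAudioOutput are hypothetical implementations of
// the AudioInput/AudioOutput interfaces; 150 ms is an assumed delay estimate.
rtc::scoped_refptr<webrtc::AudioDeviceModule> adm =
    webrtc::jni::CreateAudioDeviceModuleFromInputAndOutput(
        webrtc::AudioDeviceModule::kAndroidJavaAudio,
        /*is_stereo_playout_supported=*/false,
        /*is_stereo_record_supported=*/false,
        /*playout_delay_ms=*/150,
        std::make_unique<MyAudioInput>(),
        std::make_unique<MyAudioOutput>());
if (adm->Init() == 0) {  // Init() must succeed before any Init*/Start* call.
  adm->InitPlayout();
  adm->StartPlayout();   // Also starts the attached AudioDeviceBuffer.
}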
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h
new file mode 100644
index 0000000000..1918336c5a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_device_module.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+class AudioDeviceBuffer;
+
+namespace jni {
+
+class AudioInput {
+ public:
+ virtual ~AudioInput() {}
+
+ virtual int32_t Init() = 0;
+ virtual int32_t Terminate() = 0;
+
+ virtual int32_t InitRecording() = 0;
+ virtual bool RecordingIsInitialized() const = 0;
+
+ virtual int32_t StartRecording() = 0;
+ virtual int32_t StopRecording() = 0;
+ virtual bool Recording() const = 0;
+
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+
+ // Returns true if the audio input supports the corresponding built-in
+ // audio effect (AEC or NS, respectively).
+ virtual bool IsAcousticEchoCancelerSupported() const = 0;
+ virtual bool IsNoiseSuppressorSupported() const = 0;
+
+ virtual int32_t EnableBuiltInAEC(bool enable) = 0;
+ virtual int32_t EnableBuiltInNS(bool enable) = 0;
+};
+
+class AudioOutput {
+ public:
+ virtual ~AudioOutput() {}
+
+ virtual int32_t Init() = 0;
+ virtual int32_t Terminate() = 0;
+ virtual int32_t InitPlayout() = 0;
+ virtual bool PlayoutIsInitialized() const = 0;
+ virtual int32_t StartPlayout() = 0;
+ virtual int32_t StopPlayout() = 0;
+ virtual bool Playing() const = 0;
+ virtual bool SpeakerVolumeIsAvailable() = 0;
+ virtual int SetSpeakerVolume(uint32_t volume) = 0;
+ virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+ virtual int GetPlayoutUnderrunCount() = 0;
+};
+
+// Extract an android.media.AudioManager from an android.content.Context.
+ScopedJavaLocalRef<jobject> GetAudioManager(JNIEnv* env,
+ const JavaRef<jobject>& j_context);
+
+// Get default audio sample rate by querying an android.media.AudioManager.
+int GetDefaultSampleRate(JNIEnv* env, const JavaRef<jobject>& j_audio_manager);
+
+// Get audio input and output parameters based on a number of settings.
+void GetAudioParameters(JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager,
+ int input_sample_rate,
+ int output_sample_rate,
+ bool use_stereo_input,
+ bool use_stereo_output,
+ AudioParameters* input_parameters,
+ AudioParameters* output_parameters);
+
+// Glue together an audio input and audio output to get an AudioDeviceModule.
+rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::AudioLayer audio_layer,
+ bool is_stereo_playout_supported,
+ bool is_stereo_record_supported,
+ uint16_t playout_delay_ms,
+ std::unique_ptr<AudioInput> audio_input,
+ std::unique_ptr<AudioOutput> audio_output);
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_
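To make the AudioOutput contract above concrete, here is a hedged sketch of a do-nothing implementation (hypothetical, e.g. as a test fake; it assumes the includes of this header):

// Hypothetical no-op AudioOutput; not part of this patch.
class NullAudioOutput : public webrtc::jni::AudioOutput {
 public:
  int32_t Init() override { return 0; }
  int32_t Terminate() override { return 0; }
  int32_t InitPlayout() override { initialized_ = true; return 0; }
  bool PlayoutIsInitialized() const override { return initialized_; }
  int32_t StartPlayout() override { playing_ = true; return 0; }
  int32_t StopPlayout() override { playing_ = false; return 0; }
  bool Playing() const override { return playing_; }
  // No volume control is exposed; report unavailability like the JNI
  // classes do for unsupported features.
  bool SpeakerVolumeIsAvailable() override { return false; }
  int SetSpeakerVolume(uint32_t volume) override { return -1; }
  absl::optional<uint32_t> SpeakerVolume() const override { return absl::nullopt; }
  absl::optional<uint32_t> MaxSpeakerVolume() const override { return absl::nullopt; }
  absl::optional<uint32_t> MinSpeakerVolume() const override { return absl::nullopt; }
  void AttachAudioBuffer(webrtc::AudioDeviceBuffer* audioBuffer) override {}
  int GetPlayoutUnderrunCount() override { return 0; }

 private:
  bool initialized_ = false;
  bool playing_ = false;
};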
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc
new file mode 100644
index 0000000000..d206297001
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc
@@ -0,0 +1,267 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+
+#include <string>
+#include <utility>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace jni {
+
+namespace {
+// Scoped class which logs its lifetime as a UMA statistic. It generates
+// a histogram which measures the time it takes for a method/scope to execute.
+class ScopedHistogramTimer {
+ public:
+ explicit ScopedHistogramTimer(const std::string& name)
+ : histogram_name_(name), start_time_ms_(rtc::TimeMillis()) {}
+ ~ScopedHistogramTimer() {
+ const int64_t life_time_ms = rtc::TimeSince(start_time_ms_);
+ RTC_HISTOGRAM_COUNTS_1000(histogram_name_, life_time_ms);
+ RTC_LOG(LS_INFO) << histogram_name_ << ": " << life_time_ms;
+ }
+
+ private:
+ const std::string histogram_name_;
+ int64_t start_time_ms_;
+};
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> AudioRecordJni::CreateJavaWebRtcAudioRecord(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioRecord_Constructor(env, j_context, j_audio_manager);
+}
+
+AudioRecordJni::AudioRecordJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ int total_delay_ms,
+ const JavaRef<jobject>& j_audio_record)
+ : j_audio_record_(env, j_audio_record),
+ audio_parameters_(audio_parameters),
+ total_delay_ms_(total_delay_ms),
+ direct_buffer_address_(nullptr),
+ direct_buffer_capacity_in_bytes_(0),
+ frames_per_buffer_(0),
+ initialized_(false),
+ recording_(false),
+ audio_device_buffer_(nullptr) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(audio_parameters_.is_valid());
+ Java_WebRtcAudioRecord_setNativeAudioRecord(env, j_audio_record_,
+ jni::jlongFromPointer(this));
+ // Detach from this thread since construction is allowed to happen on a
+ // different thread.
+ thread_checker_.Detach();
+ thread_checker_java_.Detach();
+}
+
+AudioRecordJni::~AudioRecordJni() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+}
+
+int32_t AudioRecordJni::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ env_ = AttachCurrentThreadIfNeeded();
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+int32_t AudioRecordJni::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ thread_checker_.Detach();
+ return 0;
+}
+
+int32_t AudioRecordJni::InitRecording() {
+ RTC_LOG(LS_INFO) << "InitRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (initialized_) {
+ // Already initialized.
+ return 0;
+ }
+ RTC_DCHECK(!recording_);
+ ScopedHistogramTimer timer("WebRTC.Audio.InitRecordingDurationMs");
+
+ int frames_per_buffer = Java_WebRtcAudioRecord_initRecording(
+ env_, j_audio_record_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()));
+ if (frames_per_buffer < 0) {
+ direct_buffer_address_ = nullptr;
+ RTC_LOG(LS_ERROR) << "InitRecording failed";
+ return -1;
+ }
+ frames_per_buffer_ = static_cast<size_t>(frames_per_buffer);
+ RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ RTC_CHECK_EQ(direct_buffer_capacity_in_bytes_,
+ frames_per_buffer_ * bytes_per_frame);
+ RTC_CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer());
+ initialized_ = true;
+ return 0;
+}
+
+bool AudioRecordJni::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int32_t AudioRecordJni::StartRecording() {
+ RTC_LOG(LS_INFO) << "StartRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (recording_) {
+ // Already recording.
+ return 0;
+ }
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+ << "Recording can not start since InitRecording must succeed first";
+ return 0;
+ }
+ ScopedHistogramTimer timer("WebRTC.Audio.StartRecordingDurationMs");
+ if (!Java_WebRtcAudioRecord_startRecording(env_, j_audio_record_)) {
+ RTC_LOG(LS_ERROR) << "StartRecording failed";
+ return -1;
+ }
+ recording_ = true;
+ return 0;
+}
+
+int32_t AudioRecordJni::StopRecording() {
+ RTC_LOG(LS_INFO) << "StopRecording";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+ // Check if the audio source matched the activated recording session, but
+ // only if a valid result exists, to avoid logging invalid statistics.
+ if (Java_WebRtcAudioRecord_isAudioConfigVerified(env_, j_audio_record_)) {
+ const bool session_was_ok =
+ Java_WebRtcAudioRecord_isAudioSourceMatchingRecordingSession(
+ env_, j_audio_record_);
+ RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.SourceMatchesRecordingSession",
+ session_was_ok);
+ RTC_LOG(LS_INFO)
+ << "HISTOGRAM(WebRTC.Audio.SourceMatchesRecordingSession): "
+ << session_was_ok;
+ }
+ if (!Java_WebRtcAudioRecord_stopRecording(env_, j_audio_record_)) {
+ RTC_LOG(LS_ERROR) << "StopRecording failed";
+ return -1;
+ }
+ // If we don't detach here, we will hit an RTC_DCHECK in DataIsRecorded()
+ // next time StartRecording() is called since it will create a new Java
+ // thread.
+ thread_checker_java_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ direct_buffer_address_ = nullptr;
+ return 0;
+}
+
+bool AudioRecordJni::Recording() const {
+ return recording_;
+}
+
+void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_LOG(LS_INFO) << "SetRecordingSampleRate(" << sample_rate_hz << ")";
+ audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ RTC_LOG(LS_INFO) << "SetRecordingChannels(" << channels << ")";
+ audio_device_buffer_->SetRecordingChannels(channels);
+}
+
+bool AudioRecordJni::IsAcousticEchoCancelerSupported() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_isAcousticEchoCancelerSupported(
+ env_, j_audio_record_);
+}
+
+bool AudioRecordJni::IsNoiseSuppressorSupported() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_isNoiseSuppressorSupported(env_,
+ j_audio_record_);
+}
+
+int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInAEC(" << enable << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_enableBuiltInAEC(env_, j_audio_record_, enable)
+ ? 0
+ : -1;
+}
+
+int32_t AudioRecordJni::EnableBuiltInNS(bool enable) {
+ RTC_LOG(LS_INFO) << "EnableBuiltInNS(" << enable << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioRecord_enableBuiltInNS(env_, j_audio_record_, enable)
+ ? 0
+ : -1;
+}
+
+void AudioRecordJni::CacheDirectBufferAddress(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ const JavaParamRef<jobject>& byte_buffer) {
+ RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!direct_buffer_address_);
+ direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+ jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
+ RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
+ direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
+}
+
+// This method is called on a high-priority thread from Java. The name of
+// the thread is 'AudioRecordThread'.
+void AudioRecordJni::DataIsRecorded(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ int length,
+ int64_t capture_timestamp_ns) {
+ RTC_DCHECK(thread_checker_java_.IsCurrent());
+ if (!audio_device_buffer_) {
+ RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
+ return;
+ }
+ audio_device_buffer_->SetRecordedBuffer(
+ direct_buffer_address_, frames_per_buffer_, capture_timestamp_ns);
+ // We provide one (combined) fixed delay estimate for the APM and use the
+ // `playDelayMs` parameter only. Components like the AEC only see the sum
+ // of `playDelayMs` and `recDelayMs`, hence the distribution does not matter.
+ audio_device_buffer_->SetVQEData(total_delay_ms_, 0);
+ if (audio_device_buffer_->DeliverRecordedData() == -1) {
+ RTC_LOG(LS_INFO) << "AudioDeviceBuffer::DeliverRecordedData failed";
+ }
+}
+
+} // namespace jni
+
+} // namespace webrtc
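The buffer invariants checked in InitRecording() can be verified with a short worked example (48000 Hz mono is an assumed configuration):

// Worked example of the InitRecording() invariants, assuming 48000 Hz mono:
//   frames_per_buffer_               = 48000 / 100 = 480  (one 10 ms buffer)
//   bytes_per_frame                  = 1 * sizeof(int16_t) = 2
//   direct_buffer_capacity_in_bytes_ = 480 * 2 = 960
// That is, the direct ByteBuffer shared with Java must hold exactly one
// 10 ms buffer of 16-bit mono PCM.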
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h
new file mode 100644
index 0000000000..49c905daaf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_record_jni.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio input support for Android using the Java
+// AudioRecord interface. Most of the work is done by its Java counterpart in
+// WebRtcAudioRecord.java. This class is created and lives on a thread in
+// C++-land, but recorded audio buffers are delivered on a high-priority
+// thread managed by the Java class.
+//
+// The Java class makes use of AudioEffect features (mainly AEC) which are
+// first available in Jelly Bean. If it is instantiated running against earlier
+// SDKs, the AEC provided by the APM in WebRTC must be used and enabled
+// separately instead.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+//
+// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
+// Additional thread checking guarantees that no other (possibly non-attached)
+// thread is used.
+class AudioRecordJni : public AudioInput {
+ public:
+ static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioRecord(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager);
+
+ AudioRecordJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ int total_delay_ms,
+ const JavaRef<jobject>& j_webrtc_audio_record);
+ ~AudioRecordJni() override;
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+
+ int32_t InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int32_t StartRecording() override;
+ int32_t StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+
+ int32_t EnableBuiltInAEC(bool enable) override;
+ int32_t EnableBuiltInNS(bool enable) override;
+
+ // Called from Java side so we can cache the address of the Java-managed
+ // `byte_buffer` in `direct_buffer_address_`. The size of the buffer
+ // is also stored in `direct_buffer_capacity_in_bytes_`.
+ // This method will be called by the WebRtcAudioRecord constructor, i.e.,
+ // on the same thread that this object is created on.
+ void CacheDirectBufferAddress(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ const JavaParamRef<jobject>& byte_buffer);
+
+ // Called periodically by the Java based WebRtcAudioRecord object when
+ // recording has started. Each call indicates that there are `length` new
+ // bytes recorded in the memory area `direct_buffer_address_` and it is
+ // now time to send these to the consumer.
+ // This method is called on a high-priority thread from Java. The name of
+ // the thread is 'AudioRecordThread'.
+ void DataIsRecorded(JNIEnv* env,
+ const JavaParamRef<jobject>& j_caller,
+ int length,
+ int64_t capture_timestamp_ns);
+
+ private:
+ // Stores thread ID in constructor.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in the first call to DataIsRecorded() from the
+ // high-priority thread in Java. Detached during construction of this object.
+ SequenceChecker thread_checker_java_;
+
+ // Wraps the Java specific parts of the AudioRecordJni class.
+ JNIEnv* env_ = nullptr;
+ ScopedJavaGlobalRef<jobject> j_audio_record_;
+
+ const AudioParameters audio_parameters_;
+
+ // Delay estimate of the total round-trip delay (input + output).
+ // Fixed value provided at construction which can take one out of two
+ // possible values. See audio_common.h for details.
+ const int total_delay_ms_;
+
+ // Cached copy of address to direct audio buffer owned by `j_audio_record_`.
+ void* direct_buffer_address_;
+
+ // Number of bytes in the direct audio buffer owned by `j_audio_record_`.
+ size_t direct_buffer_capacity_in_bytes_;
+
+ // Number of audio frames per audio buffer. Each audio frame corresponds to
+ // one sample of PCM mono data at 16 bits per sample. Hence, each audio
+ // frame contains 2 bytes (given that the Java layer only supports mono).
+ // Example: 480 for 48000 Hz or 441 for 44100 Hz.
+ size_t frames_per_buffer_;
+
+ bool initialized_;
+
+ bool recording_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // enclosing audio device module implementation and therefore outlives this
+ // object.
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_RECORD_JNI_H_
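A hedged sketch of the expected native-side call sequence; env, params, buffer and j_record are assumed placeholders (j_record would come from CreateJavaWebRtcAudioRecord()):

// Sketch only: `env`, `params`, `buffer` and `j_record` are assumed to exist.
webrtc::jni::AudioRecordJni record(env, params, /*total_delay_ms=*/150,
                                   j_record);
record.Init();                      // Attaches the calling thread to the VM.
record.AttachAudioBuffer(&buffer);  // Must happen before data is delivered.
record.InitRecording();             // Negotiates the 10 ms buffer with Java.
record.StartRecording();            // DataIsRecorded() now fires on
                                    // 'AudioRecordThread'.
// ...
record.StopRecording();
record.Terminate();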
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
new file mode 100644
index 0000000000..c1ff4c30e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -0,0 +1,271 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+
+#include <utility>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+namespace jni {
+
+ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager) {
+ return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager);
+}
+
+AudioTrackJni::AudioTrackJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ const JavaRef<jobject>& j_webrtc_audio_track)
+ : j_audio_track_(env, j_webrtc_audio_track),
+ audio_parameters_(audio_parameters),
+ direct_buffer_address_(nullptr),
+ direct_buffer_capacity_in_bytes_(0),
+ frames_per_buffer_(0),
+ initialized_(false),
+ playing_(false),
+ audio_device_buffer_(nullptr) {
+ RTC_LOG(LS_INFO) << "ctor";
+ RTC_DCHECK(audio_parameters_.is_valid());
+ Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_,
+ jni::jlongFromPointer(this));
+ // Detach from this thread since construction is allowed to happen on a
+ // different thread.
+ thread_checker_.Detach();
+ thread_checker_java_.Detach();
+}
+
+AudioTrackJni::~AudioTrackJni() {
+ RTC_LOG(LS_INFO) << "dtor";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+}
+
+int32_t AudioTrackJni::Init() {
+ RTC_LOG(LS_INFO) << "Init";
+ env_ = AttachCurrentThreadIfNeeded();
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+int32_t AudioTrackJni::Terminate() {
+ RTC_LOG(LS_INFO) << "Terminate";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopPlayout();
+ thread_checker_.Detach();
+ return 0;
+}
+
+int32_t AudioTrackJni::InitPlayout() {
+ RTC_LOG(LS_INFO) << "InitPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (initialized_) {
+ // Already initialized.
+ return 0;
+ }
+ RTC_DCHECK(!playing_);
+ double buffer_size_factor =
+ strtod(webrtc::field_trial::FindFullName(
+ "WebRTC-AudioDevicePlayoutBufferSizeFactor")
+ .c_str(),
+ nullptr);
+ if (buffer_size_factor == 0)
+ buffer_size_factor = 1.0;
+ int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+ env_, j_audio_track_, audio_parameters_.sample_rate(),
+ static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+ if (requested_buffer_size_bytes < 0) {
+ RTC_LOG(LS_ERROR) << "InitPlayout failed";
+ return -1;
+ }
+ // Update UMA histograms for both the requested and actual buffer size.
+ // To avoid division by zero, we assume the sample rate is 48k if an invalid
+ // value is found.
+ const int sample_rate = audio_parameters_.sample_rate() <= 0
+ ? 48000
+ : audio_parameters_.sample_rate();
+ // This calculation assumes that audio is mono.
+ const int requested_buffer_size_ms =
+ (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+ requested_buffer_size_ms, 0, 1000, 100);
+ int actual_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ if (actual_buffer_size_frames >= 0) {
+ const int actual_buffer_size_ms =
+ actual_buffer_size_frames * 1000 / sample_rate;
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+ actual_buffer_size_ms, 0, 1000, 100);
+ }
+
+ initialized_ = true;
+ return 0;
+}
+
+bool AudioTrackJni::PlayoutIsInitialized() const {
+ return initialized_;
+}
+
+int32_t AudioTrackJni::StartPlayout() {
+ RTC_LOG(LS_INFO) << "StartPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (playing_) {
+ // Already playing.
+ return 0;
+ }
+ if (!initialized_) {
+ RTC_DLOG(LS_WARNING)
+ << "Playout can not start since InitPlayout must succeed first";
+ return 0;
+ }
+ if (!Java_WebRtcAudioTrack_startPlayout(env_, j_audio_track_)) {
+ RTC_LOG(LS_ERROR) << "StartPlayout failed";
+ return -1;
+ }
+ playing_ = true;
+ return 0;
+}
+
+int32_t AudioTrackJni::StopPlayout() {
+ RTC_LOG(LS_INFO) << "StopPlayout";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ // Log the difference in initial and current buffer level.
+ const int current_buffer_size_frames =
+ Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+ const int initial_buffer_size_frames =
+ Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(env_, j_audio_track_);
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_HISTOGRAM_COUNTS(
+ "WebRTC.Audio.AndroidNativeAudioBufferSizeDifferenceFromInitialMs",
+ (current_buffer_size_frames - initial_buffer_size_frames) * 1000 /
+ sample_rate_hz,
+ -500, 100, 100);
+
+ if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) {
+ RTC_LOG(LS_ERROR) << "StopPlayout failed";
+ return -1;
+ }
+ // If we don't detach here, we will hit an RTC_DCHECK next time StartPlayout()
+ // is called since it will create a new Java thread.
+ thread_checker_java_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ direct_buffer_address_ = nullptr;
+ return 0;
+}
+
+bool AudioTrackJni::Playing() const {
+ return playing_;
+}
+
+bool AudioTrackJni::SpeakerVolumeIsAvailable() {
+ return true;
+}
+
+int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
+ RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioTrack_setStreamVolume(env_, j_audio_track_,
+ static_cast<int>(volume))
+ ? 0
+ : -1;
+}
+
+absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
+}
+
+absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ return 0;
+}
+
+absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ const uint32_t volume =
+ Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
+ RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume;
+ return volume;
+}
+
+int AudioTrackJni::GetPlayoutUnderrunCount() {
+ return Java_WebRtcAudioTrack_GetPlayoutUnderrunCount(env_, j_audio_track_);
+}
+
+// TODO(henrika): possibly add stereo support.
+void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ RTC_LOG(LS_INFO) << "AttachAudioBuffer";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
+ audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")";
+ audio_device_buffer_->SetPlayoutChannels(channels);
+}
+
+void AudioTrackJni::CacheDirectBufferAddress(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& byte_buffer) {
+ RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!direct_buffer_address_);
+ direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
+ jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
+ RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
+ direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
+ RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
+}
+
+// This method is called on a high-priority thread from Java. The name of
+// the thread is 'AudioTrackThread'.
+void AudioTrackJni::GetPlayoutData(JNIEnv* env, size_t length) {
+ RTC_DCHECK(thread_checker_java_.IsCurrent());
+ const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
+ RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
+ if (!audio_device_buffer_) {
+ RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
+ return;
+ }
+ // Pull decoded data (in 16-bit PCM format) from jitter buffer.
+ int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
+ if (samples <= 0) {
+ RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
+ return;
+ }
+ RTC_DCHECK_EQ(samples, frames_per_buffer_);
+ // Copy decoded data into common byte buffer to ensure that it can be
+ // written to the Java based audio track.
+ samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
+ RTC_DCHECK_EQ(length, bytes_per_frame * samples);
+}
+
+} // namespace jni
+
+} // namespace webrtc
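As a sanity check on the buffer-size histograms computed in InitPlayout(), a worked example (48000 Hz mono playout and 1920 requested bytes are assumed values):

// Worked example for InitPlayout(), assuming 48000 Hz mono playout and a
// requested buffer of 1920 bytes (960 16-bit frames, i.e. 20 ms):
//   requested_buffer_size_ms = (1920 * 1000) / (2 * 48000) = 20
//   actual_buffer_size_ms    = actual_buffer_size_frames * 1000 / 48000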
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h
new file mode 100644
index 0000000000..5ca907c42f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/audio_track_jni.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
+
+#include <jni.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio output support for Android using the Java
+// AudioTrack interface. Most of the work is done by its Java counterpart in
+// WebRtcAudioTrack.java. This class is created and lives on a thread in
+// C++-land, but decoded audio buffers are requested on a high-priority
+// thread managed by the Java class.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+//
+// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed.
+// Additional thread checking guarantees that no other (possibly non-attached)
+// thread is used.
+class AudioTrackJni : public AudioOutput {
+ public:
+ static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_context,
+ const JavaRef<jobject>& j_audio_manager);
+
+ AudioTrackJni(JNIEnv* env,
+ const AudioParameters& audio_parameters,
+ const JavaRef<jobject>& j_webrtc_audio_track);
+ ~AudioTrackJni() override;
+
+ int32_t Init() override;
+ int32_t Terminate() override;
+
+ int32_t InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int32_t StartPlayout() override;
+ int32_t StopPlayout() override;
+ bool Playing() const override;
+
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+ int GetPlayoutUnderrunCount() override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ // Called from Java side so we can cache the address of the Java-managed
+ // `byte_buffer` in `direct_buffer_address_`. The size of the buffer
+ // is also stored in `direct_buffer_capacity_in_bytes_`.
+ // Called on the same thread as the creating thread.
+ void CacheDirectBufferAddress(JNIEnv* env,
+ const JavaParamRef<jobject>& byte_buffer);
+ // Called periodically by the Java based WebRtcAudioTrack object when
+ // playout has started. Each call indicates that `length` new bytes should
+ // be written to the memory area `direct_buffer_address_` for playout.
+ // This method is called on a high-priority thread from Java. The name of
+ // the thread is 'AudioTrackThread'.
+ void GetPlayoutData(JNIEnv* env, size_t length);
+
+ private:
+ // Stores thread ID in constructor.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in the first call to GetPlayoutData() from the
+ // high-priority thread in Java. Detached during construction of this object.
+ SequenceChecker thread_checker_java_;
+
+ // Wraps the Java specific parts of the AudioTrackJni class.
+ JNIEnv* env_ = nullptr;
+ ScopedJavaGlobalRef<jobject> j_audio_track_;
+
+ // Contains audio parameters provided to this class at construction by the
+ // AudioManager.
+ const AudioParameters audio_parameters_;
+
+ // Cached copy of address to direct audio buffer owned by `j_audio_track_`.
+ void* direct_buffer_address_;
+
+ // Number of bytes in the direct audio buffer owned by `j_audio_track_`.
+ size_t direct_buffer_capacity_in_bytes_;
+
+ // Number of audio frames per audio buffer. Each audio frame corresponds to
+ // one sample of PCM mono data at 16 bits per sample. Hence, each audio
+ // frame contains 2 bytes (given that the Java layer only supports mono).
+ // Example: 480 for 48000 Hz or 441 for 44100 Hz.
+ size_t frames_per_buffer_;
+
+ bool initialized_;
+
+ bool playing_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+ // enclosing audio device module implementation; the AudioDeviceBuffer is a
+ // member of that instance and therefore outlives this object.
+ AudioDeviceBuffer* audio_device_buffer_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
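One playout callback cycle, sketched under the same mono assumption used in the comments above (480 frames per 10 ms at 48000 Hz):

// Sketch of one GetPlayoutData() cycle, assuming 48000 Hz mono
// (length == 480 frames * 2 bytes == 960):
//   Java 'AudioTrackThread' -> GetPlayoutData(env, /*length=*/960)
//     -> audio_device_buffer_->RequestPlayoutData(480)  // pull 10 ms of audio
//     -> audio_device_buffer_->GetPlayoutData(direct_buffer_address_)
// after which the Java side presumably writes the shared buffer to its
// AudioTrack instance.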
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc
new file mode 100644
index 0000000000..1c3cbe4bbe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/java_audio_device_module.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "sdk/android/generated_java_audio_jni/JavaAudioDeviceModule_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
+#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_JavaAudioDeviceModule_CreateAudioDeviceModule(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_context,
+ const JavaParamRef<jobject>& j_audio_manager,
+ const JavaParamRef<jobject>& j_webrtc_audio_record,
+ const JavaParamRef<jobject>& j_webrtc_audio_track,
+ int input_sample_rate,
+ int output_sample_rate,
+ jboolean j_use_stereo_input,
+ jboolean j_use_stereo_output) {
+ AudioParameters input_parameters;
+ AudioParameters output_parameters;
+ GetAudioParameters(env, j_context, j_audio_manager, input_sample_rate,
+ output_sample_rate, j_use_stereo_input,
+ j_use_stereo_output, &input_parameters,
+ &output_parameters);
+ auto audio_input = std::make_unique<AudioRecordJni>(
+ env, input_parameters, kHighLatencyModeDelayEstimateInMilliseconds,
+ j_webrtc_audio_record);
+ auto audio_output = std::make_unique<AudioTrackJni>(env, output_parameters,
+ j_webrtc_audio_track);
+ return jlongFromPointer(CreateAudioDeviceModuleFromInputAndOutput(
+ AudioDeviceModule::kAndroidJavaAudio,
+ j_use_stereo_input, j_use_stereo_output,
+ kHighLatencyModeDelayEstimateInMilliseconds,
+ std::move(audio_input), std::move(audio_output))
+ .release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc
new file mode 100644
index 0000000000..300019a161
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.cc
@@ -0,0 +1,144 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+#include <SLES/OpenSLES.h>
+
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Returns a string representation given an integer SL_RESULT_XXX code.
+// The mapping can be found in <SLES/OpenSLES.h>.
+const char* GetSLErrorString(size_t code) {
+ static const char* sl_error_strings[] = {
+ "SL_RESULT_SUCCESS", // 0
+ "SL_RESULT_PRECONDITIONS_VIOLATED", // 1
+ "SL_RESULT_PARAMETER_INVALID", // 2
+ "SL_RESULT_MEMORY_FAILURE", // 3
+ "SL_RESULT_RESOURCE_ERROR", // 4
+ "SL_RESULT_RESOURCE_LOST", // 5
+ "SL_RESULT_IO_ERROR", // 6
+ "SL_RESULT_BUFFER_INSUFFICIENT", // 7
+ "SL_RESULT_CONTENT_CORRUPTED", // 8
+ "SL_RESULT_CONTENT_UNSUPPORTED", // 9
+ "SL_RESULT_CONTENT_NOT_FOUND", // 10
+ "SL_RESULT_PERMISSION_DENIED", // 11
+ "SL_RESULT_FEATURE_UNSUPPORTED", // 12
+ "SL_RESULT_INTERNAL_ERROR", // 13
+ "SL_RESULT_UNKNOWN_ERROR", // 14
+ "SL_RESULT_OPERATION_ABORTED", // 15
+ "SL_RESULT_CONTROL_LOST", // 16
+ };
+
+ if (code >= arraysize(sl_error_strings)) {
+ return "SL_RESULT_UNKNOWN_ERROR";
+ }
+ return sl_error_strings[code];
+}
+
+SLDataFormat_PCM CreatePCMConfiguration(size_t channels,
+ int sample_rate,
+ size_t bits_per_sample) {
+ RTC_CHECK_EQ(bits_per_sample, SL_PCMSAMPLEFORMAT_FIXED_16);
+ SLDataFormat_PCM format;
+ format.formatType = SL_DATAFORMAT_PCM;
+ format.numChannels = static_cast<SLuint32>(channels);
+ // Note that the sample rate is expressed in milliHertz, not Hertz.
+ switch (sample_rate) {
+ case 8000:
+ format.samplesPerSec = SL_SAMPLINGRATE_8;
+ break;
+ case 16000:
+ format.samplesPerSec = SL_SAMPLINGRATE_16;
+ break;
+ case 22050:
+ format.samplesPerSec = SL_SAMPLINGRATE_22_05;
+ break;
+ case 32000:
+ format.samplesPerSec = SL_SAMPLINGRATE_32;
+ break;
+ case 44100:
+ format.samplesPerSec = SL_SAMPLINGRATE_44_1;
+ break;
+ case 48000:
+ format.samplesPerSec = SL_SAMPLINGRATE_48;
+ break;
+ case 64000:
+ format.samplesPerSec = SL_SAMPLINGRATE_64;
+ break;
+ case 88200:
+ format.samplesPerSec = SL_SAMPLINGRATE_88_2;
+ break;
+ case 96000:
+ format.samplesPerSec = SL_SAMPLINGRATE_96;
+ break;
+ default:
+ RTC_CHECK(false) << "Unsupported sample rate: " << sample_rate;
+ break;
+ }
+ format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+ format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+ format.endianness = SL_BYTEORDER_LITTLEENDIAN;
+ if (format.numChannels == 1) {
+ format.channelMask = SL_SPEAKER_FRONT_CENTER;
+ } else if (format.numChannels == 2) {
+ format.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
+ } else {
+ RTC_CHECK(false) << "Unsupported number of channels: "
+ << format.numChannels;
+ }
+ return format;
+}
+
+OpenSLEngineManager::OpenSLEngineManager() {
+ thread_checker_.Detach();
+}
+
+SLObjectItf OpenSLEngineManager::GetOpenSLEngine() {
+ RTC_LOG(LS_INFO) << "GetOpenSLEngine";
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // OpenSL ES for Android only supports a single engine per application.
+ // If one has already been created, return the existing object instead of
+ // creating a new one.
+ if (engine_object_.Get() != nullptr) {
+ RTC_LOG(LS_WARNING)
+ << "The OpenSL ES engine object has already been created";
+ return engine_object_.Get();
+ }
+ // Create the engine object in thread safe mode.
+ const SLEngineOption option[] = {
+ {SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE)}};
+ SLresult result =
+ slCreateEngine(engine_object_.Receive(), 1, option, 0, NULL, NULL);
+ if (result != SL_RESULT_SUCCESS) {
+ RTC_LOG(LS_ERROR) << "slCreateEngine() failed: "
+ << GetSLErrorString(result);
+ engine_object_.Reset();
+ return nullptr;
+ }
+ // Realize the SL Engine in synchronous mode.
+ result = engine_object_->Realize(engine_object_.Get(), SL_BOOLEAN_FALSE);
+ if (result != SL_RESULT_SUCCESS) {
+ RTC_LOG(LS_ERROR) << "Realize() failed: " << GetSLErrorString(result);
+ engine_object_.Reset();
+ return nullptr;
+ }
+ // Finally return the SLObjectItf interface of the engine object.
+ return engine_object_.Get();
+}
+
+} // namespace jni
+
+} // namespace webrtc
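A minimal usage sketch of the helpers above (48000 Hz mono is an assumed example; the error code is just for illustration):

// Build an SL PCM format for 48 kHz, 16-bit mono audio.
SLDataFormat_PCM format = webrtc::jni::CreatePCMConfiguration(
    /*channels=*/1, /*sample_rate=*/48000, /*bits_per_sample=*/16);
// format.samplesPerSec is now SL_SAMPLINGRATE_48, i.e. 48000000 milliHertz.

// Map an SL result code to a readable string for logging.
RTC_LOG(LS_INFO) << webrtc::jni::GetSLErrorString(SL_RESULT_PARAMETER_INVALID);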
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h
new file mode 100644
index 0000000000..9dd1e0f7d7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_common.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
+
+#include <SLES/OpenSLES.h>
+#include <stddef.h>
+
+#include "api/ref_counted_base.h"
+#include "api/sequence_checker.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+namespace jni {
+
+// Returns a string representation given an integer SL_RESULT_XXX code.
+// The mapping can be found in <SLES/OpenSLES.h>.
+const char* GetSLErrorString(size_t code);
+
+// Configures an SL_DATAFORMAT_PCM structure based on native audio parameters.
+SLDataFormat_PCM CreatePCMConfiguration(size_t channels,
+ int sample_rate,
+ size_t bits_per_sample);
+
+// Helper class for using SLObjectItf interfaces.
+template <typename SLType, typename SLDerefType>
+class ScopedSLObject {
+ public:
+ ScopedSLObject() : obj_(nullptr) {}
+
+ ~ScopedSLObject() { Reset(); }
+
+ SLType* Receive() {
+ RTC_DCHECK(!obj_);
+ return &obj_;
+ }
+
+ SLDerefType operator->() { return *obj_; }
+
+ SLType Get() const { return obj_; }
+
+ void Reset() {
+ if (obj_) {
+ (*obj_)->Destroy(obj_);
+ obj_ = nullptr;
+ }
+ }
+
+ private:
+ SLType obj_;
+};
+
+typedef ScopedSLObject<SLObjectItf, const SLObjectItf_*> ScopedSLObjectItf;
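+
+// Illustrative usage: Receive() exposes the raw storage for a create call,
+// and the destructor calls Destroy() on the wrapped object, e.g.:
+//
+//   ScopedSLObjectItf engine;
+//   if (slCreateEngine(engine.Receive(), 0, nullptr, 0, nullptr, nullptr) ==
+//       SL_RESULT_SUCCESS) {
+//     engine->Realize(engine.Get(), SL_BOOLEAN_FALSE);
+//   }
+//   // `engine` destroys the underlying SLObjectItf when it goes out of scope.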
+
+// Creates and realizes the main (global) Open SL engine object and returns
+// a reference to it. The engine object is only created at the first call
+// since OpenSL ES for Android only supports a single engine per application.
+// Subsequent calls return the already created engine.
+// Note: This class must be used single threaded and this is enforced by a
+// thread checker.
+class OpenSLEngineManager
+ : public rtc::RefCountedNonVirtual<OpenSLEngineManager> {
+ public:
+ OpenSLEngineManager();
+ ~OpenSLEngineManager() = default;
+ SLObjectItf GetOpenSLEngine();
+
+ private:
+ SequenceChecker thread_checker_;
+ // This object is the global entry point of the OpenSL ES API.
+  // After creating the engine object, the application can obtain this object's
+  // SLEngineItf interface. This interface contains creation methods for all
+  // the other object types in the API. None of these interfaces are realized
+  // by this class. It only provides access to the global engine object.
+ ScopedSLObjectItf engine_object_;
+};
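+
+// Usage sketch, assuming the manager is shared (via scoped_refptr) by the
+// audio input and output components of the audio device module:
+//
+//   auto manager = rtc::make_ref_counted<OpenSLEngineManager>();
+//   SLEngineItf engine = nullptr;
+//   if (SLObjectItf engine_object = manager->GetOpenSLEngine()) {
+//     (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine);
+//   }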
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_COMMON_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc
new file mode 100644
index 0000000000..6300a3abe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.cc
@@ -0,0 +1,446 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_player.h"
+
+#include <android/log.h>
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+
+#define TAG "OpenSLESPlayer"
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+
+#define RETURN_ON_ERROR(op, ...) \
+ do { \
+ SLresult err = (op); \
+ if (err != SL_RESULT_SUCCESS) { \
+ ALOGE("%s failed: %s", #op, GetSLErrorString(err)); \
+ return __VA_ARGS__; \
+ } \
+ } while (0)
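+
+// Usage sketch: the variadic arguments become the enclosing function's return
+// value when `op` fails, so a bool-returning helper can write, e.g.:
+//
+//   RETURN_ON_ERROR(
+//       (*engine_)->CreateOutputMix(engine_, output_mix_.Receive(), 0, nullptr,
+//                                   nullptr),
+//       false);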
+
+namespace webrtc {
+
+namespace jni {
+
+OpenSLESPlayer::OpenSLESPlayer(
+ const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+ : audio_parameters_(audio_parameters),
+ audio_device_buffer_(nullptr),
+ initialized_(false),
+ playing_(false),
+ buffer_index_(0),
+ engine_manager_(std::move(engine_manager)),
+ engine_(nullptr),
+ player_(nullptr),
+ simple_buffer_queue_(nullptr),
+ volume_(nullptr),
+ last_play_time_(0) {
+ ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+ // Use native audio output parameters provided by the audio manager and
+ // define the PCM format structure.
+ pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
+ audio_parameters_.sample_rate(),
+ audio_parameters_.bits_per_sample());
+ // Detach from this thread since we want to use the checker to verify calls
+ // from the internal audio thread.
+ thread_checker_opensles_.Detach();
+}
+
+OpenSLESPlayer::~OpenSLESPlayer() {
+ ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ DestroyAudioPlayer();
+ DestroyMix();
+ engine_ = nullptr;
+ RTC_DCHECK(!engine_);
+ RTC_DCHECK(!output_mix_.Get());
+ RTC_DCHECK(!player_);
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_DCHECK(!volume_);
+}
+
+int OpenSLESPlayer::Init() {
+ ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (audio_parameters_.channels() == 2) {
+ ALOGW("Stereo mode is enabled");
+ }
+ return 0;
+}
+
+int OpenSLESPlayer::Terminate() {
+ ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopPlayout();
+ return 0;
+}
+
+int OpenSLESPlayer::InitPlayout() {
+ ALOGD("InitPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!playing_);
+ if (!ObtainEngineInterface()) {
+ ALOGE("Failed to obtain SL Engine interface");
+ return -1;
+ }
+ CreateMix();
+ initialized_ = true;
+ buffer_index_ = 0;
+ return 0;
+}
+
+bool OpenSLESPlayer::PlayoutIsInitialized() const {
+ return initialized_;
+}
+
+int OpenSLESPlayer::StartPlayout() {
+ ALOGD("StartPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!playing_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetPlayout();
+ }
+ // The number of lower latency audio players is limited, hence we create the
+ // audio player in Start() and destroy it in Stop().
+ CreateAudioPlayer();
+ // Fill up audio buffers to avoid initial glitch and to ensure that playback
+ // starts when mode is later changed to SL_PLAYSTATE_PLAYING.
+ // TODO(henrika): we can save some delay by only making one call to
+ // EnqueuePlayoutData. Most likely not worth the risk of adding a glitch.
+ last_play_time_ = rtc::Time();
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ EnqueuePlayoutData(true);
+ }
+ // Start streaming data by setting the play state to SL_PLAYSTATE_PLAYING.
+ // For a player object, when the object is in the SL_PLAYSTATE_PLAYING
+ // state, adding buffers will implicitly start playback.
+ RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_PLAYING), -1);
+ playing_ = (GetPlayState() == SL_PLAYSTATE_PLAYING);
+ RTC_DCHECK(playing_);
+ return 0;
+}
+
+int OpenSLESPlayer::StopPlayout() {
+ ALOGD("StopPlayout[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !playing_) {
+ return 0;
+ }
+ // Stop playing by setting the play state to SL_PLAYSTATE_STOPPED.
+ RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_STOPPED), -1);
+ // Clear the buffer queue to flush out any remaining data.
+ RETURN_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_), -1);
+#if RTC_DCHECK_IS_ON
+ // Verify that the buffer queue is in fact cleared as it should.
+ SLAndroidSimpleBufferQueueState buffer_queue_state;
+ (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &buffer_queue_state);
+ RTC_DCHECK_EQ(0, buffer_queue_state.count);
+ RTC_DCHECK_EQ(0, buffer_queue_state.index);
+#endif
+ // The number of lower latency audio players is limited, hence we create the
+ // audio player in Start() and destroy it in Stop().
+ DestroyAudioPlayer();
+ thread_checker_opensles_.Detach();
+ initialized_ = false;
+ playing_ = false;
+ return 0;
+}
+
+bool OpenSLESPlayer::Playing() const {
+ return playing_;
+}
+
+bool OpenSLESPlayer::SpeakerVolumeIsAvailable() {
+ return false;
+}
+
+int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) {
+ return -1;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
+}
+
+void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ ALOGD("AttachAudioBuffer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ audio_device_buffer_ = audioBuffer;
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz);
+ audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetPlayoutChannels(%zu)", channels);
+ audio_device_buffer_->SetPlayoutChannels(channels);
+ RTC_CHECK(audio_device_buffer_);
+ AllocateDataBuffers();
+}
+
+void OpenSLESPlayer::AllocateDataBuffers() {
+ ALOGD("AllocateDataBuffers");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_CHECK(audio_device_buffer_);
+ // Create a modified audio buffer class which allows us to ask for any number
+  // of samples (and not only multiples of 10 ms) to match the native OpenSL ES
+ // buffer size. The native buffer size corresponds to the
+ // PROPERTY_OUTPUT_FRAMES_PER_BUFFER property which is the number of audio
+ // frames that the HAL (Hardware Abstraction Layer) buffer can hold. It is
+ // recommended to construct audio buffers so that they contain an exact
+ // multiple of this number. If so, callbacks will occur at regular intervals,
+ // which reduces jitter.
+ const size_t buffer_size_in_samples =
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels();
+ ALOGD("native buffer size: %zu", buffer_size_in_samples);
+ ALOGD("native buffer size in ms: %.2f",
+ audio_parameters_.GetBufferSizeInMilliseconds());
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+  // Allocate memory for the audio buffers.
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ audio_buffers_[i].reset(new SLint16[buffer_size_in_samples]);
+ }
+}
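+
+// Worked example of the sizing above (illustrative figures): with a native
+// configuration of 48000 Hz mono and 192 frames per buffer,
+// buffer_size_in_samples = 192 * 1 = 192, i.e. 384 bytes of 16-bit PCM and
+// 192 / 48 = 4 ms of audio per buffer.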
+
+bool OpenSLESPlayer::ObtainEngineInterface() {
+ ALOGD("ObtainEngineInterface");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (engine_)
+ return true;
+ // Get access to (or create if not already existing) the global OpenSL Engine
+ // object.
+ SLObjectItf engine_object = engine_manager_->GetOpenSLEngine();
+ if (engine_object == nullptr) {
+ ALOGE("Failed to access the global OpenSL engine");
+ return false;
+ }
+ // Get the SL Engine Interface which is implicit.
+ RETURN_ON_ERROR(
+ (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine_),
+ false);
+ return true;
+}
+
+bool OpenSLESPlayer::CreateMix() {
+ ALOGD("CreateMix");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(engine_);
+ if (output_mix_.Get())
+ return true;
+
+  // Create the output mix on the engine object. No interfaces will be used.
+ RETURN_ON_ERROR((*engine_)->CreateOutputMix(engine_, output_mix_.Receive(), 0,
+ nullptr, nullptr),
+ false);
+ RETURN_ON_ERROR(output_mix_->Realize(output_mix_.Get(), SL_BOOLEAN_FALSE),
+ false);
+ return true;
+}
+
+void OpenSLESPlayer::DestroyMix() {
+ ALOGD("DestroyMix");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!output_mix_.Get())
+ return;
+ output_mix_.Reset();
+}
+
+bool OpenSLESPlayer::CreateAudioPlayer() {
+ ALOGD("CreateAudioPlayer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(output_mix_.Get());
+ if (player_object_.Get())
+ return true;
+ RTC_DCHECK(!player_);
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_DCHECK(!volume_);
+
+  // Source: the Android Simple Buffer Queue serves as the data source.
+ SLDataLocator_AndroidSimpleBufferQueue simple_buffer_queue = {
+ SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
+ static_cast<SLuint32>(kNumOfOpenSLESBuffers)};
+ SLDataSource audio_source = {&simple_buffer_queue, &pcm_format_};
+
+  // Sink: the output mix serves as the data sink.
+ SLDataLocator_OutputMix locator_output_mix = {SL_DATALOCATOR_OUTPUTMIX,
+ output_mix_.Get()};
+ SLDataSink audio_sink = {&locator_output_mix, nullptr};
+
+  // Define interfaces that we intend to use and realize.
+ const SLInterfaceID interface_ids[] = {SL_IID_ANDROIDCONFIGURATION,
+ SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
+ const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
+ SL_BOOLEAN_TRUE};
+
+ // Create the audio player on the engine interface.
+ RETURN_ON_ERROR(
+ (*engine_)->CreateAudioPlayer(
+ engine_, player_object_.Receive(), &audio_source, &audio_sink,
+ arraysize(interface_ids), interface_ids, interface_required),
+ false);
+
+ // Use the Android configuration interface to set platform-specific
+ // parameters. Should be done before player is realized.
+ SLAndroidConfigurationItf player_config;
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(),
+ SL_IID_ANDROIDCONFIGURATION, &player_config),
+ false);
+ // Set audio player configuration to SL_ANDROID_STREAM_VOICE which
+ // corresponds to android.media.AudioManager.STREAM_VOICE_CALL.
+ SLint32 stream_type = SL_ANDROID_STREAM_VOICE;
+ RETURN_ON_ERROR(
+ (*player_config)
+ ->SetConfiguration(player_config, SL_ANDROID_KEY_STREAM_TYPE,
+ &stream_type, sizeof(SLint32)),
+ false);
+
+ // Realize the audio player object after configuration has been set.
+ RETURN_ON_ERROR(
+ player_object_->Realize(player_object_.Get(), SL_BOOLEAN_FALSE), false);
+
+ // Get the SLPlayItf interface on the audio player.
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(), SL_IID_PLAY, &player_),
+ false);
+
+ // Get the SLAndroidSimpleBufferQueueItf interface on the audio player.
+ RETURN_ON_ERROR(
+ player_object_->GetInterface(player_object_.Get(), SL_IID_BUFFERQUEUE,
+ &simple_buffer_queue_),
+ false);
+
+ // Register callback method for the Android Simple Buffer Queue interface.
+ // This method will be called when the native audio layer needs audio data.
+ RETURN_ON_ERROR((*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_,
+ SimpleBufferQueueCallback, this),
+ false);
+
+ // Get the SLVolumeItf interface on the audio player.
+ RETURN_ON_ERROR(player_object_->GetInterface(player_object_.Get(),
+ SL_IID_VOLUME, &volume_),
+ false);
+
+ // TODO(henrika): might not be required to set volume to max here since it
+ // seems to be default on most devices. Might be required for unit tests.
+ // RETURN_ON_ERROR((*volume_)->SetVolumeLevel(volume_, 0), false);
+
+ return true;
+}
+
+void OpenSLESPlayer::DestroyAudioPlayer() {
+ ALOGD("DestroyAudioPlayer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!player_object_.Get())
+ return;
+ (*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr);
+ player_object_.Reset();
+ player_ = nullptr;
+ simple_buffer_queue_ = nullptr;
+ volume_ = nullptr;
+}
+
+// static
+void OpenSLESPlayer::SimpleBufferQueueCallback(
+ SLAndroidSimpleBufferQueueItf caller,
+ void* context) {
+ OpenSLESPlayer* stream = reinterpret_cast<OpenSLESPlayer*>(context);
+ stream->FillBufferQueue();
+}
+
+void OpenSLESPlayer::FillBufferQueue() {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ SLuint32 state = GetPlayState();
+ if (state != SL_PLAYSTATE_PLAYING) {
+ ALOGW("Buffer callback in non-playing state!");
+ return;
+ }
+ EnqueuePlayoutData(false);
+}
+
+void OpenSLESPlayer::EnqueuePlayoutData(bool silence) {
+ // Check delta time between two successive callbacks and provide a warning
+ // if it becomes very large.
+ // TODO(henrika): using 150ms as upper limit but this value is rather random.
+ const uint32_t current_time = rtc::Time();
+ const uint32_t diff = current_time - last_play_time_;
+ if (diff > 150) {
+ ALOGW("Bad OpenSL ES playout timing, dT=%u [ms]", diff);
+ }
+ last_play_time_ = current_time;
+ SLint8* audio_ptr8 =
+ reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get());
+ if (silence) {
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ // Avoid acquiring real audio data from WebRTC and fill the buffer with
+ // zeros instead. Used to prime the buffer with silence and to avoid asking
+ // for audio data from two different threads.
+ memset(audio_ptr8, 0, audio_parameters_.GetBytesPerBuffer());
+ } else {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ // Read audio data from the WebRTC source using the FineAudioBuffer object
+ // to adjust for differences in buffer size between WebRTC (10ms) and native
+ // OpenSL ES. Use hardcoded delay estimate since OpenSL ES does not support
+ // delay estimation.
+ fine_audio_buffer_->GetPlayoutData(
+ rtc::ArrayView<int16_t>(audio_buffers_[buffer_index_].get(),
+ audio_parameters_.frames_per_buffer() *
+ audio_parameters_.channels()),
+ 25);
+ }
+ // Enqueue the decoded audio buffer for playback.
+ SLresult err = (*simple_buffer_queue_)
+ ->Enqueue(simple_buffer_queue_, audio_ptr8,
+ audio_parameters_.GetBytesPerBuffer());
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("Enqueue failed: %d", err);
+ }
+ buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
+}
+
+SLuint32 OpenSLESPlayer::GetPlayState() const {
+ RTC_DCHECK(player_);
+ SLuint32 state;
+ SLresult err = (*player_)->GetPlayState(player_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetPlayState failed: %d", err);
+ }
+ return state;
+}
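+
+// Illustrative call sequence, assuming `params`, `engine_manager` and
+// `audio_device_buffer` are provided by the audio device module:
+//
+//   OpenSLESPlayer player(params, engine_manager);
+//   player.Init();
+//   player.AttachAudioBuffer(audio_device_buffer);
+//   player.InitPlayout();
+//   player.StartPlayout();
+//   // Audio is now pulled on the internal OpenSL ES thread.
+//   player.StopPlayout();  // InitPlayout() is required before restarting.
+//   player.Terminate();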
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h
new file mode 100644
index 0000000000..8a22432309
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_player.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include <memory>
+
+#include "absl/types/optional.h"
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio output support for Android using the
+// C-based OpenSL ES API. No calls from C/C++ to Java using JNI are made.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+// Decoded audio buffers are requested on a dedicated internal thread managed by
+// the OpenSL ES layer.
+//
+// The existing design forces the user to call InitPlayout() after
+// StopPlayout() to be able to call StartPlayout() again. This is in line with
+// how the Java-based implementation works.
+//
+// OpenSL ES is a native C API which has no Dalvik-related overhead such as
+// garbage collection pauses, and it supports reduced audio output latency.
+// If the device doesn't claim this feature but supports API level 9 (Android
+// platform version 2.3) or later, then we can still use the OpenSL ES APIs but
+// the output latency may be higher.
+class OpenSLESPlayer : public AudioOutput {
+ public:
+ // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
+ // required for lower latency. Beginning with API level 18 (Android 4.3), a
+ // buffer count of 1 is sufficient for lower latency. In addition, the buffer
+ // size and sample rate must be compatible with the device's native output
+ // configuration provided via the audio manager at construction.
+ // TODO(henrika): perhaps set this value dynamically based on OS version.
+ static const int kNumOfOpenSLESBuffers = 2;
+
+ OpenSLESPlayer(const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+ ~OpenSLESPlayer() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitPlayout() override;
+ bool PlayoutIsInitialized() const override;
+
+ int StartPlayout() override;
+ int StopPlayout() override;
+ bool Playing() const override;
+
+ bool SpeakerVolumeIsAvailable() override;
+ int SetSpeakerVolume(uint32_t volume) override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;
+
+ int GetPlayoutUnderrunCount() override { return -1; }
+
+ private:
+ // These callback methods are called when data is required for playout.
+ // They are both called from an internal "OpenSL ES thread" which is not
+ // attached to the Dalvik VM.
+ static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
+ void* context);
+ void FillBufferQueue();
+ // Reads audio data in PCM format using the AudioDeviceBuffer.
+ // Can be called both on the main thread (during Start()) and from the
+ // internal audio thread while output streaming is active.
+ // If the `silence` flag is set, the audio is filled with zeros instead of
+ // asking the WebRTC layer for real audio data. This procedure is also known
+ // as audio priming.
+ void EnqueuePlayoutData(bool silence);
+
+ // Allocate memory for audio buffers which will be used to render audio
+ // via the SLAndroidSimpleBufferQueueItf interface.
+ void AllocateDataBuffers();
+
+  // Obtains the SL Engine Interface from the existing global Engine object.
+ // The interface exposes creation methods of all the OpenSL ES object types.
+ // This method defines the `engine_` member variable.
+ bool ObtainEngineInterface();
+
+ // Creates/destroys the output mix object.
+ bool CreateMix();
+ void DestroyMix();
+
+ // Creates/destroys the audio player and the simple-buffer object.
+ // Also creates the volume object.
+ bool CreateAudioPlayer();
+ void DestroyAudioPlayer();
+
+ SLuint32 GetPlayState() const;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to SimpleBufferQueueCallback() from internal
+ // non-application thread which is not attached to the Dalvik JVM.
+ // Detached during construction of this object.
+ SequenceChecker thread_checker_opensles_;
+
+ const AudioParameters audio_parameters_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class, which creates it in AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_;
+
+ bool initialized_;
+ bool playing_;
+
+ // PCM-type format definition.
+ // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
+ // 32-bit float representation is needed.
+ SLDataFormat_PCM pcm_format_;
+
+ // Queue of audio buffers to be used by the player object for rendering
+ // audio.
+ std::unique_ptr<SLint16[]> audio_buffers_[kNumOfOpenSLESBuffers];
+
+ // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data
+ // in chunks of 10ms. It then allows for this data to be pulled in
+ // a finer or coarser granularity. I.e. interacting with this class instead
+ // of directly with the AudioDeviceBuffer one can ask for any number of
+ // audio data samples.
+ // Example: native buffer size can be 192 audio frames at 48kHz sample rate.
+ // WebRTC will provide 480 audio frames per 10ms but OpenSL ES asks for 192
+ // in each callback (one every 4th ms). This class can then ask for 192 and
+ // the FineAudioBuffer will ask WebRTC for new data approximately only every
+ // second callback and also cache non-utilized audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue.
+ // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
+ int buffer_index_;
+
+ const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+ // This interface exposes creation methods for all the OpenSL ES object types.
+ // It is the OpenSL ES API entry point.
+ SLEngineItf engine_;
+
+ // Output mix object to be used by the player object.
+ ScopedSLObjectItf output_mix_;
+
+ // The audio player media object plays out audio to the speakers. It also
+ // supports volume control.
+ ScopedSLObjectItf player_object_;
+
+ // This interface is supported on the audio player and it controls the state
+ // of the audio player.
+ SLPlayItf player_;
+
+ // The Android Simple Buffer Queue interface is supported on the audio player
+ // and it provides methods to send audio data from the source to the audio
+ // player for rendering.
+ SLAndroidSimpleBufferQueueItf simple_buffer_queue_;
+
+  // This interface exposes controls for manipulating the object's audio volume
+ // properties. This interface is supported on the Audio Player object.
+ SLVolumeItf volume_;
+
+ // Last time the OpenSL ES layer asked for audio data to play out.
+ uint32_t last_play_time_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_PLAYER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc
new file mode 100644
index 0000000000..c426a8d92b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.cc
@@ -0,0 +1,445 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
+
+#include <android/log.h>
+
+#include <memory>
+
+#include "api/array_view.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+
+#define TAG "OpenSLESRecorder"
+#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
+#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+
+#define LOG_ON_ERROR(op) \
+ [](SLresult err) { \
+ if (err != SL_RESULT_SUCCESS) { \
+ ALOGE("%s:%d %s failed: %s", __FILE__, __LINE__, #op, \
+ GetSLErrorString(err)); \
+ return true; \
+ } \
+ return false; \
+ }(op)
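+
+// Usage sketch: unlike the player's RETURN_ON_ERROR, this macro expands to an
+// immediately invoked lambda returning true on failure, so call sites can
+// branch on it, e.g.:
+//
+//   if (LOG_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_))) {
+//     return -1;
+//   }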
+
+namespace webrtc {
+
+namespace jni {
+
+OpenSLESRecorder::OpenSLESRecorder(
+ const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager)
+ : audio_parameters_(audio_parameters),
+ audio_device_buffer_(nullptr),
+ initialized_(false),
+ recording_(false),
+ engine_manager_(std::move(engine_manager)),
+ engine_(nullptr),
+ recorder_(nullptr),
+ simple_buffer_queue_(nullptr),
+ buffer_index_(0),
+ last_rec_time_(0) {
+ ALOGD("ctor[tid=%d]", rtc::CurrentThreadId());
+ // Detach from this thread since we want to use the checker to verify calls
+ // from the internal audio thread.
+ thread_checker_opensles_.Detach();
+ // Use native audio output parameters provided by the audio manager and
+ // define the PCM format structure.
+ pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(),
+ audio_parameters_.sample_rate(),
+ audio_parameters_.bits_per_sample());
+}
+
+OpenSLESRecorder::~OpenSLESRecorder() {
+ ALOGD("dtor[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ Terminate();
+ DestroyAudioRecorder();
+ engine_ = nullptr;
+ RTC_DCHECK(!engine_);
+ RTC_DCHECK(!recorder_);
+ RTC_DCHECK(!simple_buffer_queue_);
+}
+
+int OpenSLESRecorder::Init() {
+ ALOGD("Init[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (audio_parameters_.channels() == 2) {
+ ALOGD("Stereo mode is enabled");
+ }
+ return 0;
+}
+
+int OpenSLESRecorder::Terminate() {
+ ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ StopRecording();
+ return 0;
+}
+
+int OpenSLESRecorder::InitRecording() {
+ ALOGD("InitRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!initialized_);
+ RTC_DCHECK(!recording_);
+ if (!ObtainEngineInterface()) {
+ ALOGE("Failed to obtain SL Engine interface");
+ return -1;
+ }
+ CreateAudioRecorder();
+ initialized_ = true;
+ buffer_index_ = 0;
+ return 0;
+}
+
+bool OpenSLESRecorder::RecordingIsInitialized() const {
+ return initialized_;
+}
+
+int OpenSLESRecorder::StartRecording() {
+ ALOGD("StartRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(initialized_);
+ RTC_DCHECK(!recording_);
+ if (fine_audio_buffer_) {
+ fine_audio_buffer_->ResetRecord();
+ }
+ // Add buffers to the queue before changing state to SL_RECORDSTATE_RECORDING
+ // to ensure that recording starts as soon as the state is modified. On some
+ // devices, SLAndroidSimpleBufferQueue::Clear() used in Stop() does not flush
+ // the buffers as intended and we therefore check the number of buffers
+ // already queued first. Enqueue() can return SL_RESULT_BUFFER_INSUFFICIENT
+ // otherwise.
+ int num_buffers_in_queue = GetBufferCount();
+ for (int i = 0; i < kNumOfOpenSLESBuffers - num_buffers_in_queue; ++i) {
+ if (!EnqueueAudioBuffer()) {
+ recording_ = false;
+ return -1;
+ }
+ }
+ num_buffers_in_queue = GetBufferCount();
+ RTC_DCHECK_EQ(num_buffers_in_queue, kNumOfOpenSLESBuffers);
+ LogBufferState();
+ // Start audio recording by changing the state to SL_RECORDSTATE_RECORDING.
+ // Given that buffers are already enqueued, recording should start at once.
+ // The macro returns -1 if recording fails to start.
+ last_rec_time_ = rtc::Time();
+ if (LOG_ON_ERROR(
+ (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_RECORDING))) {
+ return -1;
+ }
+ recording_ = (GetRecordState() == SL_RECORDSTATE_RECORDING);
+ RTC_DCHECK(recording_);
+ return 0;
+}
+
+int OpenSLESRecorder::StopRecording() {
+ ALOGD("StopRecording[tid=%d]", rtc::CurrentThreadId());
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!initialized_ || !recording_) {
+ return 0;
+ }
+ // Stop recording by setting the record state to SL_RECORDSTATE_STOPPED.
+ if (LOG_ON_ERROR(
+ (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_STOPPED))) {
+ return -1;
+ }
+ // Clear the buffer queue to get rid of old data when resuming recording.
+ if (LOG_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_))) {
+ return -1;
+ }
+ thread_checker_opensles_.Detach();
+ initialized_ = false;
+ recording_ = false;
+ return 0;
+}
+
+bool OpenSLESRecorder::Recording() const {
+ return recording_;
+}
+
+void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) {
+ ALOGD("AttachAudioBuffer");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_CHECK(audio_buffer);
+ audio_device_buffer_ = audio_buffer;
+ // Ensure that the audio device buffer is informed about the native sample
+ // rate used on the recording side.
+ const int sample_rate_hz = audio_parameters_.sample_rate();
+ ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz);
+ audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz);
+ // Ensure that the audio device buffer is informed about the number of
+ // channels preferred by the OS on the recording side.
+ const size_t channels = audio_parameters_.channels();
+ ALOGD("SetRecordingChannels(%zu)", channels);
+ audio_device_buffer_->SetRecordingChannels(channels);
+  // Allocate memory for internal data buffers given the existing audio
+  // parameters.
+ AllocateDataBuffers();
+}
+
+bool OpenSLESRecorder::IsAcousticEchoCancelerSupported() const {
+ return false;
+}
+
+bool OpenSLESRecorder::IsNoiseSuppressorSupported() const {
+ return false;
+}
+
+int OpenSLESRecorder::EnableBuiltInAEC(bool enable) {
+ ALOGD("EnableBuiltInAEC(%d)", enable);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ ALOGE("Not implemented");
+ return 0;
+}
+
+int OpenSLESRecorder::EnableBuiltInNS(bool enable) {
+ ALOGD("EnableBuiltInNS(%d)", enable);
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ ALOGE("Not implemented");
+ return 0;
+}
+
+bool OpenSLESRecorder::ObtainEngineInterface() {
+ ALOGD("ObtainEngineInterface");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (engine_)
+ return true;
+ // Get access to (or create if not already existing) the global OpenSL Engine
+ // object.
+ SLObjectItf engine_object = engine_manager_->GetOpenSLEngine();
+ if (engine_object == nullptr) {
+ ALOGE("Failed to access the global OpenSL engine");
+ return false;
+ }
+ // Get the SL Engine Interface which is implicit.
+ if (LOG_ON_ERROR(
+ (*engine_object)
+ ->GetInterface(engine_object, SL_IID_ENGINE, &engine_))) {
+ return false;
+ }
+ return true;
+}
+
+bool OpenSLESRecorder::CreateAudioRecorder() {
+ ALOGD("CreateAudioRecorder");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (recorder_object_.Get())
+ return true;
+ RTC_DCHECK(!recorder_);
+ RTC_DCHECK(!simple_buffer_queue_);
+
+ // Audio source configuration.
+ SLDataLocator_IODevice mic_locator = {SL_DATALOCATOR_IODEVICE,
+ SL_IODEVICE_AUDIOINPUT,
+ SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
+ SLDataSource audio_source = {&mic_locator, NULL};
+
+ // Audio sink configuration.
+ SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {
+ SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
+ static_cast<SLuint32>(kNumOfOpenSLESBuffers)};
+ SLDataSink audio_sink = {&buffer_queue, &pcm_format_};
+
+ // Create the audio recorder object (requires the RECORD_AUDIO permission).
+ // Do not realize the recorder yet. Set the configuration first.
+ const SLInterfaceID interface_id[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+ SL_IID_ANDROIDCONFIGURATION};
+ const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
+ if (LOG_ON_ERROR((*engine_)->CreateAudioRecorder(
+ engine_, recorder_object_.Receive(), &audio_source, &audio_sink,
+ arraysize(interface_id), interface_id, interface_required))) {
+ return false;
+ }
+
+ // Configure the audio recorder (before it is realized).
+ SLAndroidConfigurationItf recorder_config;
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(recorder_object_.Get(),
+ SL_IID_ANDROIDCONFIGURATION,
+ &recorder_config)))) {
+ return false;
+ }
+
+  // Use the default microphone tuned for audio communication by selecting
+  // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION. Note that
+  // SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION leads to a fast track but
+  // also excludes usage of required effects like AEC, AGC and NS.
+ SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
+ if (LOG_ON_ERROR(((*recorder_config)
+ ->SetConfiguration(recorder_config,
+ SL_ANDROID_KEY_RECORDING_PRESET,
+ &stream_type, sizeof(SLint32))))) {
+ return false;
+ }
+
+ // The audio recorder can now be realized (in synchronous mode).
+ if (LOG_ON_ERROR((recorder_object_->Realize(recorder_object_.Get(),
+ SL_BOOLEAN_FALSE)))) {
+ return false;
+ }
+
+ // Get the implicit recorder interface (SL_IID_RECORD).
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(
+ recorder_object_.Get(), SL_IID_RECORD, &recorder_)))) {
+ return false;
+ }
+
+ // Get the simple buffer queue interface (SL_IID_ANDROIDSIMPLEBUFFERQUEUE).
+ // It was explicitly requested.
+ if (LOG_ON_ERROR((recorder_object_->GetInterface(
+ recorder_object_.Get(), SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+ &simple_buffer_queue_)))) {
+ return false;
+ }
+
+ // Register the input callback for the simple buffer queue.
+ // This callback will be called when receiving new data from the device.
+ if (LOG_ON_ERROR(((*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_,
+ SimpleBufferQueueCallback, this)))) {
+ return false;
+ }
+ return true;
+}
+
+void OpenSLESRecorder::DestroyAudioRecorder() {
+ ALOGD("DestroyAudioRecorder");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ if (!recorder_object_.Get())
+ return;
+ (*simple_buffer_queue_)
+ ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr);
+ recorder_object_.Reset();
+ recorder_ = nullptr;
+ simple_buffer_queue_ = nullptr;
+}
+
+void OpenSLESRecorder::SimpleBufferQueueCallback(
+ SLAndroidSimpleBufferQueueItf buffer_queue,
+ void* context) {
+ OpenSLESRecorder* stream = static_cast<OpenSLESRecorder*>(context);
+ stream->ReadBufferQueue();
+}
+
+void OpenSLESRecorder::AllocateDataBuffers() {
+ ALOGD("AllocateDataBuffers");
+ RTC_DCHECK(thread_checker_.IsCurrent());
+ RTC_DCHECK(!simple_buffer_queue_);
+ RTC_CHECK(audio_device_buffer_);
+ // Create a modified audio buffer class which allows us to deliver any number
+  // of samples (and not only multiples of 10 ms) to match the native audio unit
+ // buffer size.
+ ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer());
+ ALOGD("frames per 10ms buffer: %zu",
+ audio_parameters_.frames_per_10ms_buffer());
+ ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer());
+ ALOGD("native sample rate: %d", audio_parameters_.sample_rate());
+ RTC_DCHECK(audio_device_buffer_);
+ fine_audio_buffer_ = std::make_unique<FineAudioBuffer>(audio_device_buffer_);
+ // Allocate queue of audio buffers that stores recorded audio samples.
+ const int buffer_size_samples =
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels();
+ audio_buffers_.reset(new std::unique_ptr<SLint16[]>[kNumOfOpenSLESBuffers]);
+ for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) {
+ audio_buffers_[i].reset(new SLint16[buffer_size_samples]);
+ }
+}
+
+void OpenSLESRecorder::ReadBufferQueue() {
+ RTC_DCHECK(thread_checker_opensles_.IsCurrent());
+ SLuint32 state = GetRecordState();
+ if (state != SL_RECORDSTATE_RECORDING) {
+ ALOGW("Buffer callback in non-recording state!");
+ return;
+ }
+ // Check delta time between two successive callbacks and provide a warning
+ // if it becomes very large.
+ // TODO(henrika): using 150ms as upper limit but this value is rather random.
+ const uint32_t current_time = rtc::Time();
+ const uint32_t diff = current_time - last_rec_time_;
+ if (diff > 150) {
+ ALOGW("Bad OpenSL ES record timing, dT=%u [ms]", diff);
+ }
+ last_rec_time_ = current_time;
+ // Send recorded audio data to the WebRTC sink.
+ // TODO(henrika): fix delay estimates. It is OK to use fixed values for now
+ // since there is no support to turn off built-in EC in combination with
+ // OpenSL ES anyhow. Hence, as is, the WebRTC based AEC (which would use
+ // these estimates) will never be active.
+ fine_audio_buffer_->DeliverRecordedData(
+ rtc::ArrayView<const int16_t>(
+ audio_buffers_[buffer_index_].get(),
+ audio_parameters_.frames_per_buffer() * audio_parameters_.channels()),
+ 25);
+  // Enqueue the utilized audio buffer and reuse it for recording.
+ EnqueueAudioBuffer();
+}
+
+bool OpenSLESRecorder::EnqueueAudioBuffer() {
+ SLresult err =
+ (*simple_buffer_queue_)
+ ->Enqueue(
+ simple_buffer_queue_,
+ reinterpret_cast<SLint8*>(audio_buffers_[buffer_index_].get()),
+ audio_parameters_.GetBytesPerBuffer());
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("Enqueue failed: %s", GetSLErrorString(err));
+ return false;
+ }
+ buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers;
+ return true;
+}
+
+SLuint32 OpenSLESRecorder::GetRecordState() const {
+ RTC_DCHECK(recorder_);
+ SLuint32 state;
+ SLresult err = (*recorder_)->GetRecordState(recorder_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetRecordState failed: %s", GetSLErrorString(err));
+ }
+ return state;
+}
+
+SLAndroidSimpleBufferQueueState OpenSLESRecorder::GetBufferQueueState() const {
+ RTC_DCHECK(simple_buffer_queue_);
+ // state.count: Number of buffers currently in the queue.
+ // state.index: Index of the currently filling buffer. This is a linear index
+ // that keeps a cumulative count of the number of buffers recorded.
+ SLAndroidSimpleBufferQueueState state;
+ SLresult err =
+ (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &state);
+ if (SL_RESULT_SUCCESS != err) {
+ ALOGE("GetState failed: %s", GetSLErrorString(err));
+ }
+ return state;
+}
+
+void OpenSLESRecorder::LogBufferState() const {
+ SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+ ALOGD("state.count:%d state.index:%d", state.count, state.index);
+}
+
+SLuint32 OpenSLESRecorder::GetBufferCount() {
+ SLAndroidSimpleBufferQueueState state = GetBufferQueueState();
+ return state.count;
+}
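+
+// Illustrative call sequence, assuming `params`, `engine_manager` and
+// `audio_device_buffer` are provided by the audio device module:
+//
+//   OpenSLESRecorder recorder(params, engine_manager);
+//   recorder.Init();
+//   recorder.AttachAudioBuffer(audio_device_buffer);
+//   recorder.InitRecording();
+//   recorder.StartRecording();
+//   // Recorded buffers are now delivered on the internal OpenSL ES thread.
+//   recorder.StopRecording();  // InitRecording() is required before restart.
+//   recorder.Terminate();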
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
new file mode 100644
index 0000000000..93c4e4eec9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/audio_device/opensles_recorder.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/sequence_checker.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_device/fine_audio_buffer.h"
+#include "modules/audio_device/include/audio_device_defines.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+
+namespace webrtc {
+
+class FineAudioBuffer;
+
+namespace jni {
+
+// Implements 16-bit mono PCM audio input support for Android using the
+// C-based OpenSL ES API. No calls from C/C++ to Java using JNI are made.
+//
+// An instance can be created on any thread, but must then be used on one and
+// the same thread. All public methods must also be called on the same thread. A
+// thread checker will RTC_DCHECK if any method is called on an invalid thread.
+// Recorded audio buffers are provided on a dedicated internal thread managed by
+// the OpenSL ES layer.
+//
+// The existing design forces the user to call InitRecording() after
+// StopRecording() to be able to call StartRecording() again. This is in line
+// with how the Java-based implementation works.
+//
+// As of API level 21, lower latency audio input is supported on select devices.
+// To take advantage of this feature, first confirm that lower latency output is
+// available. The capability for lower latency output is a prerequisite for the
+// lower latency input feature. Then, create an AudioRecorder with the same
+// sample rate and buffer size as would be used for output. OpenSL ES interfaces
+// for input effects preclude the lower latency path.
+// See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html
+// for more details.
+class OpenSLESRecorder : public AudioInput {
+ public:
+ // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is
+ // required for lower latency. Beginning with API level 18 (Android 4.3), a
+ // buffer count of 1 is sufficient for lower latency. In addition, the buffer
+ // size and sample rate must be compatible with the device's native input
+ // configuration provided via the audio manager at construction.
+ // TODO(henrika): perhaps set this value dynamically based on OS version.
+ static const int kNumOfOpenSLESBuffers = 2;
+
+ OpenSLESRecorder(const AudioParameters& audio_parameters,
+ rtc::scoped_refptr<OpenSLEngineManager> engine_manager);
+ ~OpenSLESRecorder() override;
+
+ int Init() override;
+ int Terminate() override;
+
+ int InitRecording() override;
+ bool RecordingIsInitialized() const override;
+
+ int StartRecording() override;
+ int StopRecording() override;
+ bool Recording() const override;
+
+ void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override;
+
+ // TODO(henrika): add support using OpenSL ES APIs when available.
+ bool IsAcousticEchoCancelerSupported() const override;
+ bool IsNoiseSuppressorSupported() const override;
+ int EnableBuiltInAEC(bool enable) override;
+ int EnableBuiltInNS(bool enable) override;
+
+ private:
+  // Obtains the SL Engine Interface from the existing global Engine object.
+ // The interface exposes creation methods of all the OpenSL ES object types.
+ // This method defines the `engine_` member variable.
+ bool ObtainEngineInterface();
+
+ // Creates/destroys the audio recorder and the simple-buffer queue object.
+ bool CreateAudioRecorder();
+ void DestroyAudioRecorder();
+
+ // Allocate memory for audio buffers which will be used to capture audio
+ // via the SLAndroidSimpleBufferQueueItf interface.
+ void AllocateDataBuffers();
+
+ // These callback methods are called when data has been written to the input
+ // buffer queue. They are both called from an internal "OpenSL ES thread"
+ // which is not attached to the Dalvik VM.
+ static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller,
+ void* context);
+ void ReadBufferQueue();
+
+  // Wraps calls to SLAndroidSimpleBufferQueueItf::Enqueue(). It can be called
+  // both on the main thread (but before recording has started) and from
+ // the internal audio thread while input streaming is active. It uses
+ // `simple_buffer_queue_` but no lock is needed since the initial calls from
+ // the main thread and the native callback thread are mutually exclusive.
+ bool EnqueueAudioBuffer();
+
+ // Returns the current recorder state.
+ SLuint32 GetRecordState() const;
+
+ // Returns the current buffer queue state.
+ SLAndroidSimpleBufferQueueState GetBufferQueueState() const;
+
+ // Number of buffers currently in the queue.
+ SLuint32 GetBufferCount();
+
+ // Prints a log message of the current queue state. Can be used for debugging
+ // purposes.
+ void LogBufferState() const;
+
+ // Ensures that methods are called from the same thread as this object is
+ // created on.
+ SequenceChecker thread_checker_;
+
+ // Stores thread ID in first call to SimpleBufferQueueCallback() from internal
+ // non-application thread which is not attached to the Dalvik JVM.
+ // Detached during construction of this object.
+ SequenceChecker thread_checker_opensles_;
+
+ const AudioParameters audio_parameters_;
+
+ // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
+  // AudioDeviceModuleImpl class, which creates it in AudioDeviceModule::Create().
+ AudioDeviceBuffer* audio_device_buffer_;
+
+ // PCM-type format definition.
+ // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if
+ // 32-bit float representation is needed.
+ SLDataFormat_PCM pcm_format_;
+
+ bool initialized_;
+ bool recording_;
+
+ const rtc::scoped_refptr<OpenSLEngineManager> engine_manager_;
+ // This interface exposes creation methods for all the OpenSL ES object types.
+ // It is the OpenSL ES API entry point.
+ SLEngineItf engine_;
+
+ // The audio recorder media object records audio to the destination specified
+ // by the data sink capturing it from the input specified by the data source.
+ ScopedSLObjectItf recorder_object_;
+
+ // This interface is supported on the audio recorder object and it controls
+ // the state of the audio recorder.
+ SLRecordItf recorder_;
+
+ // The Android Simple Buffer Queue interface is supported on the audio
+ // recorder. For recording, an app should enqueue empty buffers. When a
+ // registered callback sends notification that the system has finished writing
+ // data to the buffer, the app can read the buffer.
+ SLAndroidSimpleBufferQueueItf simple_buffer_queue_;
+
+ // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms
+ // chunks of audio.
+ std::unique_ptr<FineAudioBuffer> fine_audio_buffer_;
+
+ // Queue of audio buffers to be used by the recorder object for capturing
+ // audio. They will be used in a Round-robin way and the size of each buffer
+ // is given by AudioParameters::frames_per_buffer(), i.e., it corresponds to
+ // the native OpenSL ES buffer size.
+ std::unique_ptr<std::unique_ptr<SLint16[]>[]> audio_buffers_;
+
+ // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue.
+ // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ...
+ int buffer_index_;
+
+ // Last time the OpenSL ES layer delivered recorded audio data.
+ uint32_t last_rec_time_;
+};
+
+} // namespace jni
+
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_OPENSLES_RECORDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc
new file mode 100644
index 0000000000..d445cc754e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_decoder_factory_factory.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioDecoderFactoryFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong
+JNI_BuiltinAudioDecoderFactoryFactory_CreateBuiltinAudioDecoderFactory(
+ JNIEnv* env) {
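+  // release() transfers ownership of the ref-counted factory to the Java side
+  // as a raw pointer; the Java wrapper is assumed to balance this with a
+  // native release call later.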
+ return NativeToJavaPointer(CreateBuiltinAudioDecoderFactory().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc
new file mode 100644
index 0000000000..e5a4b10eee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/builtin_audio_encoder_factory_factory.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_builtin_audio_codecs_jni/BuiltinAudioEncoderFactoryFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong
+JNI_BuiltinAudioEncoderFactoryFactory_CreateBuiltinAudioEncoderFactory(
+ JNIEnv* env) {
+ return NativeToJavaPointer(CreateBuiltinAudioEncoderFactory().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc
new file mode 100644
index 0000000000..1246d88c0b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/dav1d_codec.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/dav1d_decoder.h"
+#include "sdk/android/generated_dav1d_jni/Dav1dDecoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_Dav1dDecoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateDav1dDecoder().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc
new file mode 100644
index 0000000000..1bbc7031a0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/egl_base_10_impl.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <EGL/egl.h>
+
+#include "sdk/android/generated_video_egl_jni/EglBase10Impl_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_EglBase10Impl_GetCurrentNativeEGLContext(JNIEnv* jni) {
+ return reinterpret_cast<jlong>(eglGetCurrentContext());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc
new file mode 100644
index 0000000000..9bd73a4a51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.cc
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/encoded_image.h"
+
+#include "api/video/encoded_image.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/EncodedImage_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/scoped_java_ref_counted.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+class JavaEncodedImageBuffer : public EncodedImageBufferInterface {
+ public:
+ JavaEncodedImageBuffer(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image,
+ const uint8_t* payload,
+ size_t size)
+ : j_encoded_image_(ScopedJavaRefCounted::Retain(env, j_encoded_image)),
+ data_(const_cast<uint8_t*>(payload)),
+ size_(size) {}
+
+ const uint8_t* data() const override { return data_; }
+ uint8_t* data() override { return data_; }
+ size_t size() const override { return size_; }
+
+ private:
+ // The Java object owning the buffer.
+ const ScopedJavaRefCounted j_encoded_image_;
+
+ // TODO(bugs.webrtc.org/9378): Make const, and delete above const_cast.
+ uint8_t* const data_;
+ size_t const size_;
+};
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
+ VideoFrameType frame_type) {
+ return Java_FrameType_fromNativeIndex(env, static_cast<int>(frame_type));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(
+ JNIEnv* jni,
+ const EncodedImage& image) {
+ ScopedJavaLocalRef<jobject> buffer = NewDirectByteBuffer(
+ jni, const_cast<uint8_t*>(image.data()), image.size());
+ ScopedJavaLocalRef<jobject> frame_type =
+ NativeToJavaFrameType(jni, image._frameType);
+ ScopedJavaLocalRef<jobject> qp;
+ if (image.qp_ != -1)
+ qp = NativeToJavaInteger(jni, image.qp_);
+ // TODO(bugs.webrtc.org/9378): Keep a reference to the C++ EncodedImage data,
+ // and use the releaseCallback to manage lifetime.
+ return Java_EncodedImage_Constructor(
+ jni, buffer,
+ /*releaseCallback=*/ScopedJavaGlobalRef<jobject>(nullptr),
+ static_cast<int>(image._encodedWidth),
+ static_cast<int>(image._encodedHeight),
+ image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type,
+ static_cast<jint>(image.rotation_), qp);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
+ JNIEnv* env,
+ const std::vector<VideoFrameType>& frame_types) {
+ return NativeToJavaObjectArray(
+ env, frame_types, org_webrtc_EncodedImage_00024FrameType_clazz(env),
+ &NativeToJavaFrameType);
+}
+
+EncodedImage JavaToNativeEncodedImage(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image) {
+ const JavaRef<jobject>& j_buffer =
+ Java_EncodedImage_getBuffer(env, j_encoded_image);
+ const uint8_t* buffer =
+ static_cast<uint8_t*>(env->GetDirectBufferAddress(j_buffer.obj()));
+ const size_t buffer_size = env->GetDirectBufferCapacity(j_buffer.obj());
+
+ EncodedImage frame;
+ frame.SetEncodedData(rtc::make_ref_counted<JavaEncodedImageBuffer>(
+ env, j_encoded_image, buffer, buffer_size));
+
+ frame._encodedWidth = Java_EncodedImage_getEncodedWidth(env, j_encoded_image);
+ frame._encodedHeight =
+ Java_EncodedImage_getEncodedHeight(env, j_encoded_image);
+  frame.rotation_ = static_cast<VideoRotation>(
+      Java_EncodedImage_getRotation(env, j_encoded_image));
+
+ frame.qp_ = JavaToNativeOptionalInt(
+ env, Java_EncodedImage_getQp(env, j_encoded_image))
+ .value_or(-1);
+
+  frame._frameType = static_cast<VideoFrameType>(
+      Java_EncodedImage_getFrameType(env, j_encoded_image));
+ return frame;
+}
+
+int64_t GetJavaEncodedImageCaptureTimeNs(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image) {
+ return Java_EncodedImage_getCaptureTimeNs(env, j_encoded_image);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h
new file mode 100644
index 0000000000..fc6d06243c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/encoded_image.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
+#define SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "api/video/video_frame_type.h"
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+
+class EncodedImage;
+
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameType(JNIEnv* env,
+ VideoFrameType frame_type);
+ScopedJavaLocalRef<jobject> NativeToJavaEncodedImage(JNIEnv* jni,
+ const EncodedImage& image);
+ScopedJavaLocalRef<jobjectArray> NativeToJavaFrameTypeArray(
+ JNIEnv* env,
+ const std::vector<VideoFrameType>& frame_types);
+
+EncodedImage JavaToNativeEncodedImage(JNIEnv* env,
+ const JavaRef<jobject>& j_encoded_image);
+
+int64_t GetJavaEncodedImageCaptureTimeNs(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_ENCODED_IMAGE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc
new file mode 100644
index 0000000000..882df95b82
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/h264_utils.cc
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "sdk/android/generated_video_jni/H264Utils_jni.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+
+namespace webrtc {
+namespace jni {
+
+static jboolean JNI_H264Utils_IsSameH264Profile(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& params1,
+ const JavaParamRef<jobject>& params2) {
+ return H264IsSameProfile(JavaToNativeStringMap(env, params1),
+ JavaToNativeStringMap(env, params2));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc
new file mode 100644
index 0000000000..95dcd66bb5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/java_i420_buffer.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/generated_video_jni/JavaI420Buffer_jni.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_JavaI420Buffer_CropAndScaleI420(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint scale_width,
+ jint scale_height) {
+ uint8_t const* src_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ uint8_t const* src_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ uint8_t const* src_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+  // Perform cropping using pointer arithmetic. The chroma offsets are halved
+  // because I420 subsamples U and V by two in each dimension.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
+ src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
+
+  int ret = libyuv::I420Scale(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
+ crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
+ dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
+ RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc
new file mode 100644
index 0000000000..3764f8deeb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_common.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/ref_count.h"
+#include "sdk/android/generated_base_jni/JniCommon_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
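+// Called from the Java JniCommon class to pin and release ref-counted native
+// objects whose raw pointers have been handed across the JNI boundary.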
+static void JNI_JniCommon_AddRef(JNIEnv* jni,
+ jlong j_native_ref_counted_pointer) {
+ reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+ ->AddRef();
+}
+
+static void JNI_JniCommon_ReleaseRef(JNIEnv* jni,
+ jlong j_native_ref_counted_pointer) {
+ reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
+ ->Release();
+}
+
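+// Allocates memory with global operator new and wraps it in a direct
+// ByteBuffer for Java. The buffer does not own the memory; it must be
+// released with JNI_JniCommon_FreeByteBuffer() below.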
+static ScopedJavaLocalRef<jobject> JNI_JniCommon_AllocateByteBuffer(
+ JNIEnv* jni,
+ jint size) {
+ void* new_data = ::operator new(size);
+ return NewDirectByteBuffer(jni, new_data, size);
+}
+
+static void JNI_JniCommon_FreeByteBuffer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& byte_buffer) {
+ void* data = jni->GetDirectBufferAddress(byte_buffer.obj());
+ ::operator delete(data);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
new file mode 100644
index 0000000000..dc34849d1b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/jni_generator_helper.h"
+
+#include "sdk/android/native_api/jni/class_loader.h"
+
+namespace webrtc {
+
+// If `atomic_class_id` is already set, this returns immediately. Otherwise it
+// looks up the class and stores it. If there's a race, we take care to store
+// only one global reference (and the duplicated effort happens only once).
+jclass LazyGetClass(JNIEnv* env,
+ const char* class_name,
+ std::atomic<jclass>* atomic_class_id) {
+ const jclass value = std::atomic_load(atomic_class_id);
+ if (value)
+ return value;
+ webrtc::ScopedJavaGlobalRef<jclass> clazz(webrtc::GetClass(env, class_name));
+ RTC_CHECK(!clazz.is_null()) << class_name;
+ jclass cas_result = nullptr;
+ if (std::atomic_compare_exchange_strong(atomic_class_id, &cas_result,
+ clazz.obj())) {
+    // We successfully stored `clazz` in `atomic_class_id`, so we
+    // intentionally leak the global ref since it's now stored there.
+    return clazz.Release();
+  } else {
+    // Some other thread came before us and stored a global pointer in
+    // `atomic_class_id`. Release our global ref and return the ref from the
+    // other thread.
+ return cas_result;
+ }
+}
+
+// If `atomic_method_id` is already set, this returns immediately. Otherwise it
+// looks up the method id and stores it. If there's a race, that's fine since
+// the values are the same (and the duplicated effort happens only once).
+template <MethodID::Type type>
+jmethodID MethodID::LazyGet(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ const jmethodID value = std::atomic_load(atomic_method_id);
+ if (value)
+ return value;
+ auto get_method_ptr = type == MethodID::TYPE_STATIC
+ ? &JNIEnv::GetStaticMethodID
+ : &JNIEnv::GetMethodID;
+ jmethodID id = (env->*get_method_ptr)(clazz, method_name, jni_signature);
+ CHECK_EXCEPTION(env) << "error during GetMethodID: " << method_name << ", "
+ << jni_signature;
+ RTC_CHECK(id) << method_name << ", " << jni_signature;
+ std::atomic_store(atomic_method_id, id);
+ return id;
+}
+
+// Various template instantiations.
+template jmethodID MethodID::LazyGet<MethodID::TYPE_STATIC>(
+ JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+
+template jmethodID MethodID::LazyGet<MethodID::TYPE_INSTANCE>(
+ JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h
new file mode 100644
index 0000000000..23695ca8c7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_generator_helper.h
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+// Do not include this file directly. It's intended to be used only by the JNI
+// generation script. We are exporting types in strange namespaces in order to
+// be compatible with the generated code targeted for Chromium.
+
+#ifndef SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
+#define SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
+
+#include <jni.h>
+#include <atomic>
+
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/jni/jni_int_wrapper.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+#define CHECK_CLAZZ(env, jcaller, clazz, ...) RTC_DCHECK(clazz);
+#define CHECK_NATIVE_PTR(env, jcaller, native_ptr, method_name, ...) \
+ RTC_DCHECK(native_ptr) << method_name;
+
+#define BASE_EXPORT
+#define JNI_REGISTRATION_EXPORT __attribute__((visibility("default")))
+
+#if defined(WEBRTC_ARCH_X86)
+// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on
+// x86 - use force_align_arg_pointer to realign the stack at the JNI
+// boundary. crbug.com/655248
+#define JNI_GENERATOR_EXPORT \
+ __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT JNICALL
+#else
+#define JNI_GENERATOR_EXPORT extern "C" JNIEXPORT JNICALL
+#endif
+
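+// Aborts if a Java exception is pending. The streamed comma expression runs
+// only on failure: it describes the pending exception, clears it, and yields
+// an empty string for the log message.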
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+namespace webrtc {
+
+// This function will initialize `atomic_class_id` to contain a global ref to
+// the given class, and will return that ref on subsequent calls. The caller is
+// responsible for zero-initializing `atomic_class_id`. It's fine to call this
+// simultaneously on multiple threads referencing the same `atomic_class_id`.
+jclass LazyGetClass(JNIEnv* env,
+ const char* class_name,
+ std::atomic<jclass>* atomic_class_id);
+
+// This class is a wrapper for JNIEnv Get(Static)MethodID.
+class MethodID {
+ public:
+ enum Type {
+ TYPE_STATIC,
+ TYPE_INSTANCE,
+ };
+
+  // This function will initialize `atomic_method_id` to contain a ref to
+  // the given method, and will return that ref on subsequent calls. The caller
+  // is responsible for zero-initializing `atomic_method_id`. It's fine to call
+  // this simultaneously on multiple threads referencing the same
+  // `atomic_method_id`.
+ template <Type type>
+ static jmethodID LazyGet(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id);
+};
+
+} // namespace webrtc
+
+// Re-export relevant classes into the namespaces the script expects.
+namespace base {
+namespace android {
+
+using webrtc::JavaParamRef;
+using webrtc::JavaRef;
+using webrtc::ScopedJavaLocalRef;
+using webrtc::LazyGetClass;
+using webrtc::MethodID;
+
+} // namespace android
+} // namespace base
+
+namespace jni_generator {
+inline void CheckException(JNIEnv* env) {
+ CHECK_EXCEPTION(env);
+}
+
+// A 32-bit value could collide with an address on the stack, but a random
+// 64-bit marker is far less likely to appear there by chance.
+constexpr uint64_t kJniStackMarkerValue = 0xbdbdef1bebcade1b;
+
+// Context about the JNI call, with the exception left unchecked, to be stored
+// on the stack.
+struct BASE_EXPORT JniJavaCallContextUnchecked {
+ inline JniJavaCallContextUnchecked() {
+// TODO(ssid): Implement for other architectures.
+#if defined(__arm__) || defined(__aarch64__)
+ // This assumes that this method does not increment the stack pointer.
+ asm volatile("mov %0, sp" : "=r"(sp));
+#else
+ sp = 0;
+#endif
+ }
+
+ // Force no inline to reduce code size.
+ template <base::android::MethodID::Type type>
+ void Init(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ env1 = env;
+
+ // Make sure compiler doesn't optimize out the assignment.
+ memcpy(&marker, &kJniStackMarkerValue, sizeof(kJniStackMarkerValue));
+ // Gets PC of the calling function.
+ pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
+
+ method_id = base::android::MethodID::LazyGet<type>(
+ env, clazz, method_name, jni_signature, atomic_method_id);
+ }
+
+ ~JniJavaCallContextUnchecked() {
+ // Reset so that spurious marker finds are avoided.
+ memset(&marker, 0, sizeof(marker));
+ }
+
+ uint64_t marker;
+ uintptr_t sp;
+ uintptr_t pc;
+
+ JNIEnv* env1;
+ jmethodID method_id;
+};
+
+// Context about the JNI call, with the exception checked on destruction, to be
+// stored on the stack.
+struct BASE_EXPORT JniJavaCallContextChecked {
+ // Force no inline to reduce code size.
+ template <base::android::MethodID::Type type>
+ void Init(JNIEnv* env,
+ jclass clazz,
+ const char* method_name,
+ const char* jni_signature,
+ std::atomic<jmethodID>* atomic_method_id) {
+ base.Init<type>(env, clazz, method_name, jni_signature, atomic_method_id);
+ // Reset `pc` to correct caller.
+ base.pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
+ }
+
+ ~JniJavaCallContextChecked() { jni_generator::CheckException(base.env1); }
+
+ JniJavaCallContextUnchecked base;
+};
+
+static_assert(sizeof(JniJavaCallContextChecked) ==
+ sizeof(JniJavaCallContextUnchecked),
+ "Stack unwinder cannot work with structs of different sizes.");
+} // namespace jni_generator
+
+#endif // SDK_ANDROID_SRC_JNI_JNI_GENERATOR_HELPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc
new file mode 100644
index 0000000000..53399abab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/src/jni/jni_helpers.h"
+
+#include <vector>
+
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env,
+ void* address,
+ jlong capacity) {
+ ScopedJavaLocalRef<jobject> buffer(
+ env, env->NewDirectByteBuffer(address, capacity));
+ CHECK_EXCEPTION(env) << "error NewDirectByteBuffer";
+ return buffer;
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+ jobject ret = jni->NewGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+ RTC_CHECK(ret);
+ return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+ jni->DeleteGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+ jni_->PopLocalFrame(nullptr);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
new file mode 100644
index 0000000000..7a2f27b99d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_helpers.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+#define SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/jvm.h"
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#if defined(WEBRTC_ARCH_X86)
+// Dalvik JIT generated code doesn't guarantee 16-byte stack alignment on
+// x86 - use force_align_arg_pointer to realign the stack at the JNI
+// boundary. crbug.com/655248
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
+ __attribute__((force_align_arg_pointer)) extern "C" JNIEXPORT rettype \
+ JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#else
+#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
+ extern "C" JNIEXPORT rettype JNICALL Java_org_webrtc_##name(__VA_ARGS__)
+#endif
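+
+// Hypothetical example:
+//   JNI_FUNCTION_DECLARATION(void, Bar_nativeFoo, JNIEnv* jni, jclass)
+// expands (on non-x86) to:
+//   extern "C" JNIEXPORT void JNICALL
+//   Java_org_webrtc_Bar_nativeFoo(JNIEnv* jni, jclass)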
+
+namespace webrtc {
+namespace jni {
+
+// TODO(sakal): Remove once clients have migrated.
+using ::webrtc::JavaToStdMapStrings;
+
+// Deprecated, use NativeToJavaPointer.
+inline long jlongFromPointer(void* ptr) {
+ return NativeToJavaPointer(ptr);
+}
+
+ScopedJavaLocalRef<jobject> NewDirectByteBuffer(JNIEnv* env,
+ void* address,
+ jlong capacity);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+ explicit ScopedLocalRefFrame(JNIEnv* jni);
+ ~ScopedLocalRefFrame();
+
+ private:
+ JNIEnv* jni_;
+};
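+
+// Usage sketch (hypothetical C++ callback):
+//   void OnEvent() {
+//     JNIEnv* env = AttachCurrentThreadIfNeeded();
+//     ScopedLocalRefFrame local_ref_frame(env);
+//     // Java local references created here are released when
+//     // local_ref_frame goes out of scope.
+//   }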
+
+} // namespace jni
+} // namespace webrtc
+
+// TODO(magjed): Remove once external clients are updated.
+namespace webrtc_jni {
+
+using webrtc::AttachCurrentThreadIfNeeded;
+using webrtc::jni::InitGlobalJniVariables;
+
+} // namespace webrtc_jni
+
+#endif // SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc
new file mode 100644
index 0000000000..a1829ad0b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jni_onload.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
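+// Invoked by the JVM when this native library is loaded, e.g. via
+// System.loadLibrary(). The return value tells the JVM which JNI version the
+// library requires.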
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ webrtc::InitClassLoader(GetEnv());
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.cc b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc
new file mode 100644
index 0000000000..4cf1aa5e8e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.cc
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/jvm.h"
+
+#include <asm/unistd.h>
+#include <pthread.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+#include <string>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data. Non-NULL in threads attached to `g_jvm` by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM* GetJVM() {
+ RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+ return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+ void* env = nullptr;
+ jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+ RTC_CHECK(((env != nullptr) && (status == JNI_OK)) ||
+ ((env == nullptr) && (status == JNI_EDETACHED)))
+ << "Unexpected GetEnv return: " << status << ":" << env;
+ return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+ // This function only runs on threads where `g_jni_ptr` is non-NULL, meaning
+ // we were responsible for originally attaching the thread, so are responsible
+ // for detaching it now. However, because some JVM implementations (notably
+ // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+ // the JVMs accounting info for this thread may already be wiped out by the
+ // time this is called. Thus it may appear we are already detached even though
+ // it was our responsibility to detach! Oh well.
+ if (!GetEnv())
+ return;
+
+ RTC_CHECK(GetEnv() == prev_jni_ptr)
+ << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+ jint status = g_jvm->DetachCurrentThread();
+ RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+ RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+ RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+ << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM* jvm) {
+ RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+ g_jvm = jvm;
+ RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+ RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+ JNIEnv* jni = nullptr;
+ if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+ return -1;
+
+ return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+ char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
+ RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+ static_cast<long>(syscall(__NR_gettid))),
+ sizeof(buf))
+ << "Thread id is bigger than uint64??";
+ return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+ char name[17] = {0};
+ if (prctl(PR_GET_NAME, name) != 0)
+ return std::string("<noname>");
+ return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ JNIEnv* jni = GetEnv();
+ if (jni)
+ return jni;
+ RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+ << "TLS has a JNIEnv* but not attached?";
+
+ std::string name(GetThreadName() + " - " + GetThreadId());
+ JavaVMAttachArgs args;
+ args.version = JNI_VERSION_1_6;
+ args.name = &name[0];
+ args.group = nullptr;
+// Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
+ void* env = nullptr;
+#else
+ JNIEnv* env = nullptr;
+#endif
+ RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+ << "Failed to attach thread";
+ RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+ jni = reinterpret_cast<JNIEnv*>(env);
+ RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+ return jni;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/jvm.h b/third_party/libwebrtc/sdk/android/src/jni/jvm.h
new file mode 100644
index 0000000000..296a7fee1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/jvm.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_JVM_H_
+#define SDK_ANDROID_SRC_JNI_JVM_H_
+
+#include <jni.h>
+
+namespace webrtc {
+namespace jni {
+
+jint InitGlobalJniVariables(JavaVM* jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM* GetJVM();
+
+// Return a |JNIEnv*| usable on this thread. Attaches to `g_jvm` if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_JVM_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc
new file mode 100644
index 0000000000..143055f79b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_codec.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/libaom_av1_decoder.h"
+#include "sdk/android/generated_libaom_av1_decoder_if_supported_jni/LibaomAv1Decoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibaomAv1Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Decoder().release());
+}
+
+static jboolean JNI_LibaomAv1Decoder_IsSupported(JNIEnv* jni) {
+ return webrtc::kIsLibaomAv1DecoderSupported;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc
new file mode 100644
index 0000000000..400c3124fe
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/libaom_av1_encoder.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
+#include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc
new file mode 100644
index 0000000000..84394d8ee5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/src/jni/logging/log_sink.h"
+
+#include "absl/strings/string_view.h"
+#include "sdk/android/generated_logging_jni/JNILogging_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+JNILogSink::JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging)
+ : j_logging_(env, j_logging) {}
+JNILogSink::~JNILogSink() = default;
+
+void JNILogSink::OnLogMessage(const std::string& msg) {
+ RTC_DCHECK_NOTREACHED();
+}
+
+void JNILogSink::OnLogMessage(const std::string& msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) {
+ OnLogMessage(absl::string_view{msg}, severity, tag);
+}
+
+void JNILogSink::OnLogMessage(absl::string_view msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_JNILogging_logToInjectable(
+ env, j_logging_, NativeToJavaString(env, std::string(msg)),
+ NativeToJavaInteger(env, severity), NativeToJavaString(env, tag));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h
new file mode 100644
index 0000000000..8e681ac3ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/logging/log_sink.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
+#define SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
+
+#include <string>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class JNILogSink : public rtc::LogSink {
+ public:
+ JNILogSink(JNIEnv* env, const JavaRef<jobject>& j_logging);
+ ~JNILogSink() override;
+
+ void OnLogMessage(const std::string& msg) override;
+ void OnLogMessage(const std::string& msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) override;
+ void OnLogMessage(absl::string_view msg,
+ rtc::LoggingSeverity severity,
+ const char* tag) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_logging_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_LOGGING_LOG_SINK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc
new file mode 100644
index 0000000000..f8eb48422b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/native_capturer_observer.h"
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/NativeCapturerObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
+ JNIEnv* env,
+ rtc::scoped_refptr<AndroidVideoTrackSource> native_source) {
+ return Java_NativeCapturerObserver_Constructor(
+ env, NativeToJavaPointer(native_source.release()));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h
new file mode 100644
index 0000000000..51acf41f03
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/native_capturer_observer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
+
+#include <jni.h>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
+ JNIEnv* env,
+ rtc::scoped_refptr<AndroidVideoTrackSource> native_source);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_NATIVE_CAPTURER_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc
new file mode 100644
index 0000000000..d0e7972446
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/nv12_buffer.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#include <vector>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+#include "rtc_base/checks.h"
+#include "sdk/android/generated_video_jni/NV12Buffer_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_NV12Buffer_CropAndScale(JNIEnv* jni,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ jint scale_width,
+ jint scale_height,
+ const JavaParamRef<jobject>& j_src,
+ jint src_width,
+ jint src_height,
+ jint src_stride,
+ jint src_slice_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v) {
+ const int src_stride_y = src_stride;
+ const int src_stride_uv = src_stride;
+ const int crop_chroma_x = crop_x / 2;
+ const int crop_chroma_y = crop_y / 2;
+ const int crop_chroma_width = (crop_width + 1) / 2;
+ const int crop_chroma_height = (crop_height + 1) / 2;
+ const int tmp_stride_u = crop_chroma_width;
+ const int tmp_stride_v = crop_chroma_width;
+ const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
+
+ uint8_t const* src_y =
+ static_cast<uint8_t const*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t const* src_uv = src_y + src_slice_height * src_stride_y;
+
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+  // Crop using pointer arithmetic. The UV plane interleaves one U and one V
+  // byte per chroma sample, hence the factor of two in the horizontal chroma
+  // offset below.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
+
+ std::vector<uint8_t> tmp_buffer(tmp_size);
+ uint8_t* tmp_u = tmp_buffer.data();
+ uint8_t* tmp_v = tmp_u + crop_chroma_height * tmp_stride_u;
+
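+  // NV12 stores chroma as a single interleaved UV plane, while I420Scale()
+  // expects separate U and V planes, so deinterleave the cropped chroma into
+  // the temporary buffer before scaling.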
+ libyuv::SplitUVPlane(src_uv, src_stride_uv, tmp_u, tmp_stride_u, tmp_v,
+ tmp_stride_v, crop_chroma_width, crop_chroma_height);
+
+ libyuv::I420Scale(src_y, src_stride_y, tmp_u, tmp_stride_u, tmp_v,
+ tmp_stride_v, crop_width, crop_height, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, scale_width,
+ scale_height, libyuv::kFilterBox);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc
new file mode 100644
index 0000000000..10e3316f33
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/nv21_buffer.cc
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#include <vector>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "rtc_base/checks.h"
+#include "sdk/android/generated_video_jni/NV21Buffer_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_NV21Buffer_CropAndScale(JNIEnv* jni,
+ jint crop_x,
+ jint crop_y,
+ jint crop_width,
+ jint crop_height,
+ jint scale_width,
+ jint scale_height,
+ const JavaParamRef<jbyteArray>& j_src,
+ jint src_width,
+ jint src_height,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v) {
+ const int src_stride_y = src_width;
+ const int src_stride_uv = src_width;
+ const int crop_chroma_x = crop_x / 2;
+ const int crop_chroma_y = crop_y / 2;
+
+ jboolean was_copy;
+ jbyte* src_bytes = jni->GetByteArrayElements(j_src.obj(), &was_copy);
+ RTC_DCHECK(!was_copy);
+ uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
+ uint8_t const* src_uv = src_y + src_height * src_stride_y;
+
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ // Crop using pointer arithmetic.
+ src_y += crop_x + crop_y * src_stride_y;
+ src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
+
+ NV12ToI420Scaler scaler;
+ // U- and V-planes are swapped because this is NV21 not NV12.
+ scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
+ crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
+ dst_u, dst_stride_u, scale_width, scale_height);
+
+ jni->ReleaseByteArrayElements(j_src.obj(), src_bytes, JNI_ABORT);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc
new file mode 100644
index 0000000000..7f3dddbb28
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h"
+
+#include <utility>
+
+#include "sdk/android/generated_peerconnection_jni/AddIceObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+AddIceCandidateObserverJni::AddIceCandidateObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void AddIceCandidateObserverJni::OnComplete(webrtc::RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_AddIceObserver_onAddSuccess(env, j_observer_global_);
+ } else {
+ Java_AddIceObserver_onAddFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h
new file mode 100644
index 0000000000..1128385389
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/add_ice_candidate_observer.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class AddIceCandidateObserverJni final
+ : public rtc::RefCountedNonVirtual<AddIceCandidateObserverJni> {
+ public:
+ AddIceCandidateObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+ ~AddIceCandidateObserverJni() = default;
+
+ void OnComplete(RTCError error);
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_ADD_ICE_CANDIDATE_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
new file mode 100644
index 0000000000..609c1b056e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/android_network_monitor.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(sakal): Remove this file once clients have updated to the native API.
+#include "sdk/android/src/jni/android_network_monitor.h"
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
new file mode 100644
index 0000000000..74c8b5547a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/audio.h"
+
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing() {
+ return AudioProcessingBuilder().Create();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
new file mode 100644
index 0000000000..7a79bed986
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+#define SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
+
+#include "api/scoped_refptr.h"
+// Adding 'nogncheck' to disable the gn include headers check.
+// We don't want this target to depend on audio-related targets.
+#include "modules/audio_processing/include/audio_processing.h" // nogncheck
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<AudioProcessing> CreateAudioProcessing();
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_AUDIO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
new file mode 100644
index 0000000000..b00287eaae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/audio_track.cc
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h"
+
+namespace webrtc {
+namespace jni {
+
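+// Forwards the volume to the track's underlying audio source.
+// AudioSourceInterface::SetVolume() documents the volume as a gain in the
+// range [0, 10].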
+static void JNI_AudioTrack_SetVolume(JNIEnv*,
+ jlong j_p,
+ jdouble volume) {
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ reinterpret_cast<AudioTrackInterface*>(j_p)->GetSource());
+ source->SetVolume(volume);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
new file mode 100644
index 0000000000..b937a0d03a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/call_session_file_rotating_log_sink.cc
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtc_base/log_sinks.h"
+#include "sdk/android/generated_peerconnection_jni/CallSessionFileRotatingLogSink_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_CallSessionFileRotatingLogSink_AddSink(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_dirPath,
+ jint j_maxFileSize,
+ jint j_severity) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingLogSink* sink =
+ new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+ if (!sink->Init()) {
+ RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+ << "Failed to init CallSessionFileRotatingLogSink for path "
+ << dir_path;
+ delete sink;
+ return 0;
+ }
+ rtc::LogMessage::AddLogToStream(
+ sink, static_cast<rtc::LoggingSeverity>(j_severity));
+ return jlongFromPointer(sink);
+}
+
+static void JNI_CallSessionFileRotatingLogSink_DeleteSink(
+ JNIEnv* jni,
+ jlong j_sink) {
+ rtc::CallSessionFileRotatingLogSink* sink =
+ reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+ rtc::LogMessage::RemoveLogToStream(sink);
+ delete sink;
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_CallSessionFileRotatingLogSink_GetLogData(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_dirPath) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingStreamReader file_reader(dir_path);
+ size_t log_size = file_reader.GetSize();
+ if (log_size == 0) {
+ RTC_LOG_V(rtc::LoggingSeverity::LS_WARNING)
+ << "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+ return ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(0));
+ }
+
+  // TODO(nisse, sakal): To avoid copying, change API to use ByteBuffer.
+  std::unique_ptr<jbyte[]> buffer(new jbyte[log_size]);
+ size_t read = file_reader.ReadAll(buffer.get(), log_size);
+
+ ScopedJavaLocalRef<jbyteArray> result =
+ ScopedJavaLocalRef<jbyteArray>(jni, jni->NewByteArray(read));
+ jni->SetByteArrayRegion(result.obj(), 0, read, buffer.get());
+
+ return result;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
new file mode 100644
index 0000000000..af5f195d98
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/crypto_options.h"
+
+#include "sdk/android/generated_peerconnection_jni/CryptoOptions_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_crypto_options) {
+ if (j_crypto_options.is_null()) {
+ return absl::nullopt;
+ }
+
+ ScopedJavaLocalRef<jobject> j_srtp =
+ Java_CryptoOptions_getSrtp(jni, j_crypto_options);
+ ScopedJavaLocalRef<jobject> j_sframe =
+ Java_CryptoOptions_getSFrame(jni, j_crypto_options);
+
+ CryptoOptions native_crypto_options;
+ native_crypto_options.srtp.enable_gcm_crypto_suites =
+ Java_Srtp_getEnableGcmCryptoSuites(jni, j_srtp);
+ native_crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher =
+ Java_Srtp_getEnableAes128Sha1_32CryptoCipher(jni, j_srtp);
+ native_crypto_options.srtp.enable_encrypted_rtp_header_extensions =
+ Java_Srtp_getEnableEncryptedRtpHeaderExtensions(jni, j_srtp);
+ native_crypto_options.sframe.require_frame_encryption =
+ Java_SFrame_getRequireFrameEncryption(jni, j_sframe);
+ return absl::optional<CryptoOptions>(native_crypto_options);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h
new file mode 100644
index 0000000000..a9c8f2609a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/crypto_options.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
+#define SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
+
+#include <jni.h>
+
+#include "absl/types/optional.h"
+#include "api/crypto/crypto_options.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+absl::optional<CryptoOptions> JavaToNativeOptionalCryptoOptions(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_crypto_options);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_CRYPTO_OPTIONS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc
new file mode 100644
index 0000000000..3552974443
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+#include <memory>
+
+#include "api/data_channel_interface.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_peerconnection_jni/DataChannel_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/data_channel.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// Adapter that presents a C++ DataChannelObserver on behalf of a Java
+// DataChannel$Observer, dispatching callbacks from C++ back to Java.
+class DataChannelObserverJni : public DataChannelObserver {
+ public:
+ DataChannelObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~DataChannelObserverJni() override {}
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override;
+ void OnStateChange() override;
+ void OnMessage(const DataBuffer& buffer) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+DataChannelObserverJni::DataChannelObserverJni(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+void DataChannelObserverJni::OnBufferedAmountChange(uint64_t previous_amount) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onBufferedAmountChange(env, j_observer_global_,
+ previous_amount);
+}
+
+void DataChannelObserverJni::OnStateChange() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onStateChange(env, j_observer_global_);
+}
+
+void DataChannelObserverJni::OnMessage(const DataBuffer& buffer) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> byte_buffer = NewDirectByteBuffer(
+ env, const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+ ScopedJavaLocalRef<jobject> j_buffer =
+ Java_Buffer_Constructor(env, byte_buffer, buffer.binary);
+ Java_Observer_onMessage(env, j_observer_global_, j_buffer);
+}
+
+DataChannelInterface* ExtractNativeDC(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return reinterpret_cast<DataChannelInterface*>(
+ Java_DataChannel_getNativeDataChannel(jni, j_dc));
+}
+
+} // namespace
+
+DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env,
+ const JavaRef<jobject>& j_init) {
+ DataChannelInit init;
+ init.ordered = Java_Init_getOrdered(env, j_init);
+ init.maxRetransmitTime = Java_Init_getMaxRetransmitTimeMs(env, j_init);
+ init.maxRetransmits = Java_Init_getMaxRetransmits(env, j_init);
+ init.protocol = JavaToStdString(env, Java_Init_getProtocol(env, j_init));
+ init.negotiated = Java_Init_getNegotiated(env, j_init);
+ init.id = Java_Init_getId(env, j_init);
+ return init;
+}
+
+ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
+ JNIEnv* env,
+ rtc::scoped_refptr<DataChannelInterface> channel) {
+ if (!channel)
+ return nullptr;
+ // Channel is now owned by Java object, and will be freed from there.
+ return Java_DataChannel_Constructor(env, jlongFromPointer(channel.release()));
+}
+
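+// The observer allocated below is owned by the Java side: the returned jlong
+// is stored in Java and later handed back to
+// JNI_DataChannel_UnregisterObserver, which deletes it. The Java-side
+// contract, sketched with hypothetical method names:
+//
+//   long nativeObserver = registerObserver(observer);
+//   ...
+//   unregisterObserver(nativeObserver);  // frees the C++ adapter
+//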
+static jlong JNI_DataChannel_RegisterObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ const JavaParamRef<jobject>& j_observer) {
+ auto observer = std::make_unique<DataChannelObserverJni>(jni, j_observer);
+ ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+ return jlongFromPointer(observer.release());
+}
+
+static void JNI_DataChannel_UnregisterObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ jlong native_observer) {
+ ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+ delete reinterpret_cast<DataChannelObserverJni*>(native_observer);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_DataChannel_Label(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return NativeToJavaString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+static jint JNI_DataChannel_Id(JNIEnv* jni, const JavaParamRef<jobject>& j_dc) {
+ int id = ExtractNativeDC(jni, j_dc)->id();
+ RTC_CHECK_LE(id, std::numeric_limits<int32_t>::max())
+ << "id overflowed jint!";
+ return static_cast<jint>(id);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_DataChannel_State(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ return Java_State_fromNativeIndex(jni, ExtractNativeDC(jni, j_dc)->state());
+}
+
+static jlong JNI_DataChannel_BufferedAmount(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+ RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+ << "buffered_amount overflowed jlong!";
+ return static_cast<jlong>(buffered_amount);
+}
+
+static void JNI_DataChannel_Close(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc) {
+ ExtractNativeDC(jni, j_dc)->Close();
+}
+
+static jboolean JNI_DataChannel_Send(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_dc,
+ const JavaParamRef<jbyteArray>& data,
+ jboolean binary) {
+ std::vector<int8_t> buffer = JavaToNativeByteArray(jni, data);
+ bool ret = ExtractNativeDC(jni, j_dc)->Send(
+ DataBuffer(rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()), binary));
+ return ret;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h
new file mode 100644
index 0000000000..9da1b67dae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/data_channel.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+#define SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
+
+namespace webrtc {
+namespace jni {
+
+DataChannelInit JavaToNativeDataChannelInit(JNIEnv* env,
+ const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> WrapNativeDataChannel(
+ JNIEnv* env,
+ rtc::scoped_refptr<DataChannelInterface> channel);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_DATA_CHANNEL_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
new file mode 100644
index 0000000000..13cb027f6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/dtmf_sender.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/dtmf_sender_interface.h"
+#include "sdk/android/generated_peerconnection_jni/DtmfSender_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jboolean JNI_DtmfSender_CanInsertDtmf(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->CanInsertDtmf();
+}
+
+static jboolean JNI_DtmfSender_InsertDtmf(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer,
+ const JavaParamRef<jstring>& tones,
+ jint duration,
+ jint inter_tone_gap) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->InsertDtmf(JavaToStdString(jni, tones), duration, inter_tone_gap);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_DtmfSender_Tones(
+ JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return NativeToJavaString(
+ jni,
+ reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)->tones());
+}
+
+static jint JNI_DtmfSender_Duration(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->duration();
+}
+
+static jint JNI_DtmfSender_InterToneGap(JNIEnv* jni,
+ jlong j_dtmf_sender_pointer) {
+ return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
+ ->inter_tone_gap();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
new file mode 100644
index 0000000000..af92ff8e89
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.cc
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include <string>
+
+#include "pc/webrtc_sdp.h"
+#include "sdk/android/generated_peerconnection_jni/IceCandidate_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/peer_connection.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> CreateJavaIceCandidate(JNIEnv* env,
+ const std::string& sdp_mid,
+ int sdp_mline_index,
+ const std::string& sdp,
+ const std::string& server_url,
+ int adapterType) {
+ return Java_IceCandidate_Constructor(
+ env, NativeToJavaString(env, sdp_mid), sdp_mline_index,
+ NativeToJavaString(env, sdp), NativeToJavaString(env, server_url),
+ NativeToJavaAdapterType(env, adapterType));
+}
+
+} // namespace
+
+cricket::Candidate JavaToNativeCandidate(JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate) {
+ std::string sdp_mid =
+ JavaToStdString(jni, Java_IceCandidate_getSdpMid(jni, j_candidate));
+ std::string sdp =
+ JavaToStdString(jni, Java_IceCandidate_getSdp(jni, j_candidate));
+ cricket::Candidate candidate;
+ if (!SdpDeserializeCandidate(sdp_mid, sdp, &candidate, NULL)) {
+ RTC_LOG(LS_ERROR) << "SdpDescrializeCandidate failed with sdp " << sdp;
+ }
+ return candidate;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidate(
+ JNIEnv* env,
+ const cricket::Candidate& candidate) {
+ std::string sdp = SdpSerializeCandidate(candidate);
+ RTC_CHECK(!sdp.empty()) << "got an empty ICE candidate";
+ // sdp_mline_index is not used here; pass the invalid value -1.
+ return CreateJavaIceCandidate(env, candidate.transport_name(),
+ -1 /* sdp_mline_index */, sdp,
+ "" /* server_url */, candidate.network_type());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate(
+ JNIEnv* env,
+ const IceCandidateInterface& candidate) {
+ std::string sdp;
+ RTC_CHECK(candidate.ToString(&sdp)) << "got so far: " << sdp;
+ return CreateJavaIceCandidate(env, candidate.sdp_mid(),
+ candidate.sdp_mline_index(), sdp,
+ candidate.candidate().url(), 0);
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray(
+ JNIEnv* jni,
+ const std::vector<cricket::Candidate>& candidates) {
+ return NativeToJavaObjectArray(jni, candidates,
+ org_webrtc_IceCandidate_clazz(jni),
+ &NativeToJavaCandidate);
+}
+
+PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_transports_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_ice_transports_type);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kAll;
+
+ if (enum_name == "RELAY")
+ return PeerConnectionInterface::kRelay;
+
+ if (enum_name == "NOHOST")
+ return PeerConnectionInterface::kNoHost;
+
+ if (enum_name == "NONE")
+ return PeerConnectionInterface::kNone;
+
+ RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+ return PeerConnectionInterface::kAll;
+}
+
+PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_bundle_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_bundle_policy);
+
+ if (enum_name == "BALANCED")
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+
+ if (enum_name == "MAXBUNDLE")
+ return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+ if (enum_name == "MAXCOMPAT")
+ return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+ RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtcp_mux_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_rtcp_mux_policy);
+
+ if (enum_name == "NEGOTIATE")
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+ if (enum_name == "REQUIRE")
+ return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+ RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_tcp_candidate_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_tcp_candidate_policy);
+
+ if (enum_name == "ENABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+ if (enum_name == "DISABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+ RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+PeerConnectionInterface::CandidateNetworkPolicy
+JavaToNativeCandidateNetworkPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate_network_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_candidate_network_policy);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+
+ if (enum_name == "LOW_COST")
+ return PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+
+ RTC_CHECK(false) << "Unexpected CandidateNetworkPolicy enum_name "
+ << enum_name;
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+}
+
+rtc::KeyType JavaToNativeKeyType(JNIEnv* jni,
+ const JavaRef<jobject>& j_key_type) {
+ std::string enum_name = GetJavaEnumName(jni, j_key_type);
+
+ if (enum_name == "RSA")
+ return rtc::KT_RSA;
+ if (enum_name == "ECDSA")
+ return rtc::KT_ECDSA;
+
+ RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+ return rtc::KT_ECDSA;
+}
+
+PeerConnectionInterface::ContinualGatheringPolicy
+JavaToNativeContinualGatheringPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_gathering_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_gathering_policy);
+ if (enum_name == "GATHER_ONCE")
+ return PeerConnectionInterface::GATHER_ONCE;
+
+ if (enum_name == "GATHER_CONTINUALLY")
+ return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+ RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+ << enum_name;
+ return PeerConnectionInterface::GATHER_ONCE;
+}
+
+webrtc::PortPrunePolicy JavaToNativePortPrunePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_port_prune_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_port_prune_policy);
+ if (enum_name == "NO_PRUNE") {
+ return webrtc::NO_PRUNE;
+ }
+ if (enum_name == "PRUNE_BASED_ON_PRIORITY") {
+ return webrtc::PRUNE_BASED_ON_PRIORITY;
+ }
+ if (enum_name == "KEEP_FIRST_READY") {
+ return webrtc::KEEP_FIRST_READY;
+ }
+
+ RTC_CHECK(false) << " Unexpected PortPrunePolicy enum name " << enum_name;
+
+ return webrtc::NO_PRUNE;
+}
+
+PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_server_tls_cert_policy) {
+ std::string enum_name = GetJavaEnumName(jni, j_ice_server_tls_cert_policy);
+
+ if (enum_name == "TLS_CERT_POLICY_SECURE")
+ return PeerConnectionInterface::kTlsCertPolicySecure;
+
+ if (enum_name == "TLS_CERT_POLICY_INSECURE_NO_CHECK")
+ return PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
+
+ RTC_CHECK(false) << "Unexpected TlsCertPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTlsCertPolicySecure;
+}
+
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_preference) {
+ std::string enum_name = GetJavaEnumName(jni, j_network_preference);
+
+ if (enum_name == "UNKNOWN")
+ return absl::nullopt;
+
+ if (enum_name == "ETHERNET")
+ return rtc::ADAPTER_TYPE_ETHERNET;
+
+ if (enum_name == "WIFI")
+ return rtc::ADAPTER_TYPE_WIFI;
+
+ if (enum_name == "CELLULAR")
+ return rtc::ADAPTER_TYPE_CELLULAR;
+
+ if (enum_name == "VPN")
+ return rtc::ADAPTER_TYPE_VPN;
+
+ if (enum_name == "LOOPBACK")
+ return rtc::ADAPTER_TYPE_LOOPBACK;
+
+ RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
+ return absl::nullopt;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h
new file mode 100644
index 0000000000..4bdeea61c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ice_candidate.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
+#define SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
+
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/jsep.h"
+#include "api/jsep_ice_candidate.h"
+#include "api/peer_connection_interface.h"
+#include "api/rtp_parameters.h"
+#include "rtc_base/ssl_identity.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+cricket::Candidate JavaToNativeCandidate(JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidate(
+ JNIEnv* env,
+ const cricket::Candidate& candidate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaIceCandidate(
+ JNIEnv* env,
+ const IceCandidateInterface& candidate);
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaCandidateArray(
+ JNIEnv* jni,
+ const std::vector<cricket::Candidate>& candidates);
+
+/*****************************************************
+ * Below are all things that go into RTCConfiguration.
+ *****************************************************/
+PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_transports_type);
+
+PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_bundle_policy);
+
+PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtcp_mux_policy);
+
+PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_tcp_candidate_policy);
+
+PeerConnectionInterface::CandidateNetworkPolicy
+JavaToNativeCandidateNetworkPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_candidate_network_policy);
+
+rtc::KeyType JavaToNativeKeyType(JNIEnv* jni,
+ const JavaRef<jobject>& j_key_type);
+
+PeerConnectionInterface::ContinualGatheringPolicy
+JavaToNativeContinualGatheringPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_gathering_policy);
+
+webrtc::PortPrunePolicy JavaToNativePortPrunePolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_port_prune_policy);
+
+PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_server_tls_cert_policy);
+
+absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_network_preference);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_ICE_CANDIDATE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc
new file mode 100644
index 0000000000..7b35ca051c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/logging.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
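+// These entry points are declared with the JNI_FUNCTION_DECLARATION macro,
+// i.e. hand-written JNI functions resolved by name
+// (Java_org_webrtc_Logging_native*), rather than via the generated *_jni.h
+// bindings used by most files in this directory.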
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogToDebugOutput,
+ JNIEnv* jni,
+ jclass,
+ jint nativeSeverity) {
+ if (nativeSeverity >= rtc::LS_VERBOSE && nativeSeverity <= rtc::LS_NONE) {
+ rtc::LogMessage::LogToDebug(
+ static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ }
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogThreads,
+ JNIEnv* jni,
+ jclass) {
+ rtc::LogMessage::LogThreads(true);
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeEnableLogTimeStamps,
+ JNIEnv* jni,
+ jclass) {
+ rtc::LogMessage::LogTimestamps(true);
+}
+
+JNI_FUNCTION_DECLARATION(void,
+ Logging_nativeLog,
+ JNIEnv* jni,
+ jclass,
+ jint j_severity,
+ jstring j_tag,
+ jstring j_message) {
+ std::string message = JavaToStdString(jni, JavaParamRef<jstring>(j_message));
+ std::string tag = JavaToStdString(jni, JavaParamRef<jstring>(j_tag));
+ RTC_LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag.c_str())
+ << message;
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc
new file mode 100644
index 0000000000..4e1a3ba406
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_constraints.h"
+
+#include <memory>
+
+#include "sdk/android/generated_peerconnection_jni/MediaConstraints_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Helper for translating a List<Pair<String, String>> to a Constraints.
+MediaConstraints::Constraints PopulateConstraintsFromJavaPairList(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_list) {
+ MediaConstraints::Constraints constraints;
+ for (const JavaRef<jobject>& entry : Iterable(env, j_list)) {
+ constraints.emplace_back(
+ JavaToStdString(env, Java_KeyValuePair_getKey(env, entry)),
+ JavaToStdString(env, Java_KeyValuePair_getValue(env, entry)));
+ }
+ return constraints;
+}
+
+} // namespace
+
+// Copies all needed data so the Java object is no longer needed after return.
+std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_constraints) {
+ return std::make_unique<MediaConstraints>(
+ PopulateConstraintsFromJavaPairList(
+ env, Java_MediaConstraints_getMandatory(env, j_constraints)),
+ PopulateConstraintsFromJavaPairList(
+ env, Java_MediaConstraints_getOptional(env, j_constraints)));
+}
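+
+// The Java object shape consumed above, sketched for reference:
+//
+//   MediaConstraints constraints = new MediaConstraints();
+//   constraints.mandatory.add(
+//       new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));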
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h
new file mode 100644
index 0000000000..68cedc7f2d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_constraints.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
+
+#include <jni.h>
+#include <memory>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<MediaConstraints> JavaToNativeMediaConstraints(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_constraints);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_CONSTRAINTS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc
new file mode 100644
index 0000000000..e20f28f310
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_source.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/MediaSource_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+static ScopedJavaLocalRef<jobject> JNI_MediaSource_GetState(JNIEnv* jni,
+ jlong j_p) {
+ return Java_State_fromNativeIndex(
+ jni, reinterpret_cast<MediaSourceInterface*>(j_p)->state());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc
new file mode 100644
index 0000000000..20d59a6f8f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_stream.h"
+
+#include <memory>
+
+#include "sdk/android/generated_peerconnection_jni/MediaStream_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+JavaMediaStream::JavaMediaStream(
+ JNIEnv* env,
+ rtc::scoped_refptr<MediaStreamInterface> media_stream)
+ : j_media_stream_(
+ env,
+ Java_MediaStream_Constructor(env,
+ jlongFromPointer(media_stream.get()))) {
+ // Create an observer to update the Java stream when the native stream's set
+ // of tracks changes.
+ observer_.reset(new MediaStreamObserver(
+ media_stream.get(),
+ [this](AudioTrackInterface* audio_track,
+ MediaStreamInterface* media_stream) {
+ OnAudioTrackAddedToStream(audio_track, media_stream);
+ },
+ [this](AudioTrackInterface* audio_track,
+ MediaStreamInterface* media_stream) {
+ OnAudioTrackRemovedFromStream(audio_track, media_stream);
+ },
+ [this](VideoTrackInterface* video_track,
+ MediaStreamInterface* media_stream) {
+ OnVideoTrackAddedToStream(video_track, media_stream);
+ },
+ [this](VideoTrackInterface* video_track,
+ MediaStreamInterface* media_stream) {
+ OnVideoTrackRemovedFromStream(video_track, media_stream);
+ }));
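+ // Hand one reference per existing track over to the Java MediaStream; the
+ // corresponding Release() happens when the Java-side track is disposed.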
+ for (rtc::scoped_refptr<AudioTrackInterface> track :
+ media_stream->GetAudioTracks()) {
+ Java_MediaStream_addNativeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track.release()));
+ }
+ for (rtc::scoped_refptr<VideoTrackInterface> track :
+ media_stream->GetVideoTracks()) {
+ Java_MediaStream_addNativeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track.release()));
+ }
+ // `j_media_stream` holds one reference. Corresponding Release() is in
+ // MediaStream_free, triggered by MediaStream.dispose().
+ media_stream.release();
+}
+
+JavaMediaStream::~JavaMediaStream() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Remove the observer first, so it doesn't react to events during deletion.
+ observer_ = nullptr;
+ Java_MediaStream_dispose(env, j_media_stream_);
+}
+
+void JavaMediaStream::OnAudioTrackAddedToStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ track->AddRef();
+ Java_MediaStream_addNativeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnVideoTrackAddedToStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ track->AddRef();
+ Java_MediaStream_addNativeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnAudioTrackRemovedFromStream(
+ AudioTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ Java_MediaStream_removeAudioTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+void JavaMediaStream::OnVideoTrackRemovedFromStream(
+ VideoTrackInterface* track,
+ MediaStreamInterface* stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(env);
+ Java_MediaStream_removeVideoTrack(env, j_media_stream_,
+ jlongFromPointer(track));
+}
+
+jclass GetMediaStreamClass(JNIEnv* env) {
+ return org_webrtc_MediaStream_clazz(env);
+}
+
+static jboolean JNI_MediaStream_AddAudioTrackToNativeStream(
+ JNIEnv* jni,
+ jlong pointer,
+ jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ rtc::scoped_refptr<AudioTrackInterface>(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_AddVideoTrackToNativeStream(
+ JNIEnv* jni,
+ jlong pointer,
+ jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ rtc::scoped_refptr<VideoTrackInterface>(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_RemoveAudioTrack(JNIEnv* jni,
+ jlong pointer,
+ jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ rtc::scoped_refptr<AudioTrackInterface>(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer)));
+}
+
+static jboolean JNI_MediaStream_RemoveVideoTrack(JNIEnv* jni,
+ jlong pointer,
+ jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ rtc::scoped_refptr<VideoTrackInterface>(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)));
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStream_GetId(JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(jni,
+ reinterpret_cast<MediaStreamInterface*>(j_p)->id());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h
new file mode 100644
index 0000000000..efa177c43e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
+
+#include <jni.h>
+#include <memory>
+
+#include "api/media_stream_interface.h"
+#include "pc/media_stream_observer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class JavaMediaStream {
+ public:
+ explicit JavaMediaStream(
+ JNIEnv* env,
+ rtc::scoped_refptr<MediaStreamInterface> media_stream);
+ ~JavaMediaStream();
+
+ const ScopedJavaGlobalRef<jobject>& j_media_stream() {
+ return j_media_stream_;
+ }
+
+ private:
+ void OnAudioTrackAddedToStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackAddedToStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnAudioTrackRemovedFromStream(AudioTrackInterface* track,
+ MediaStreamInterface* stream);
+ void OnVideoTrackRemovedFromStream(VideoTrackInterface* track,
+ MediaStreamInterface* stream);
+
+ ScopedJavaGlobalRef<jobject> j_media_stream_;
+ std::unique_ptr<MediaStreamObserver> observer_;
+};
+
+jclass GetMediaStreamClass(JNIEnv* env);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc
new file mode 100644
index 0000000000..928f10c03a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.cc
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_peerconnection_jni/MediaStreamTrack_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaMediaType(
+ JNIEnv* jni,
+ cricket::MediaType media_type) {
+ return Java_MediaType_fromNativeIndex(jni, media_type);
+}
+
+cricket::MediaType JavaToNativeMediaType(JNIEnv* jni,
+ const JavaRef<jobject>& j_media_type) {
+ return static_cast<cricket::MediaType>(
+ Java_MediaType_getNative(jni, j_media_type));
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetId(
+ JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+static ScopedJavaLocalRef<jstring> JNI_MediaStreamTrack_GetKind(
+ JNIEnv* jni,
+ jlong j_p) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+static jboolean JNI_MediaStreamTrack_GetEnabled(JNIEnv* jni,
+ jlong j_p) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_MediaStreamTrack_GetState(
+ JNIEnv* jni,
+ jlong j_p) {
+ return Java_State_fromNativeIndex(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+static jboolean JNI_MediaStreamTrack_SetEnabled(JNIEnv* jni,
+ jlong j_p,
+ jboolean enabled) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->set_enabled(
+ enabled);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h
new file mode 100644
index 0000000000..8bfe302db7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/media_stream_track.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
+#define SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
+
+#include <jni.h>
+
+#include "api/media_types.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaMediaType(
+ JNIEnv* jni,
+ cricket::MediaType media_type);
+cricket::MediaType JavaToNativeMediaType(JNIEnv* jni,
+ const JavaRef<jobject>& j_media_type);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_MEDIA_STREAM_TRACK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc
new file mode 100644
index 0000000000..d595c481f8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/owned_factory_and_threads.h"
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+OwnedFactoryAndThreads::OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory)
+ : socket_factory_(std::move(socket_factory)),
+ network_thread_(std::move(network_thread)),
+ worker_thread_(std::move(worker_thread)),
+ signaling_thread_(std::move(signaling_thread)),
+ factory_(factory) {}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
new file mode 100644
index 0000000000..7dc9443ea5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/owned_factory_and_threads.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+#define SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
+
+#include <jni.h>
+#include <memory>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread,
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface>& factory);
+
+ ~OwnedFactoryAndThreads() = default;
+
+ PeerConnectionFactoryInterface* factory() { return factory_.get(); }
+ rtc::SocketFactory* socket_factory() { return socket_factory_.get(); }
+ rtc::Thread* network_thread() { return network_thread_.get(); }
+ rtc::Thread* signaling_thread() { return signaling_thread_.get(); }
+ rtc::Thread* worker_thread() { return worker_thread_.get(); }
+
+ private:
+ // Usually implemented by the SocketServer the network thread runs on, so it
+ // must outlive the network thread; it is declared first here so that it is
+ // destroyed last.
+ const std::unique_ptr<rtc::SocketFactory> socket_factory_;
+ const std::unique_ptr<rtc::Thread> network_thread_;
+ const std::unique_ptr<rtc::Thread> worker_thread_;
+ const std::unique_ptr<rtc::Thread> signaling_thread_;
+ const rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+};
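+
+// Construction sketch (assumed call site; the actual construction happens
+// where the factory is created):
+//
+//   auto owned = std::make_unique<OwnedFactoryAndThreads>(
+//       std::move(socket_factory), std::move(network_thread),
+//       std::move(worker_thread), std::move(signaling_thread), factory);
+//   jlong handle = jlongFromPointer(owned.release());  // held by Java
+//
+// Java later frees it by passing `handle` back to a native call that deletes
+// the OwnedFactoryAndThreads, destroying the factory before the threads.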
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_OWNED_FACTORY_AND_THREADS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc
new file mode 100644
index 0000000000..502763a2d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.cc
@@ -0,0 +1,917 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Lifecycle notes: objects are owned where they will be called; in other
+// words, FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file (or other files in this directory) allocates C++
+// RefCountInterfaces it AddRef()s an artificial ref simulating the jlong held
+// in Java-land, and then Release()s the ref in the respective free call.
+// Sometimes this AddRef is implicit in the construction of a scoped_refptr<>
+// which is then .release()d. Any persistent (non-local) references from C++ to
+// Java must be global or weak (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call. In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
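+//
+// A minimal sketch of the artificial-ref pattern described above
+// (hypothetical local names; the matching Release() lives in the per-class
+// free call):
+//
+//   rtc::scoped_refptr<PeerConnectionInterface> pc = ...;
+//   jlong j_pc = jlongFromPointer(pc.release());  // ref now held via Java
+//   ...
+//   reinterpret_cast<PeerConnectionInterface*>(j_pc)->Release();  // on free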
+
+#include "sdk/android/src/jni/pc/peer_connection.h"
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "api/peer_connection_interface.h"
+#include "api/rtc_event_log_output_file.h"
+#include "api/rtp_receiver_interface.h"
+#include "api/rtp_sender_interface.h"
+#include "api/rtp_transceiver_interface.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h"
+#include "sdk/android/generated_peerconnection_jni/IceCandidateErrorEvent_jni.h"
+#include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/add_ice_candidate_observer.h"
+#include "sdk/android/src/jni/pc/crypto_options.h"
+#include "sdk/android/src/jni/pc/data_channel.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+#include "sdk/android/src/jni/pc/media_constraints.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtc_certificate.h"
+#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h"
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+#include "sdk/android/src/jni/pc/sdp_observer.h"
+#include "sdk/android/src/jni/pc/session_description.h"
+#include "sdk/android/src/jni/pc/stats_observer.h"
+#include "sdk/android/src/jni/pc/turn_customizer.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+PeerConnectionInterface* ExtractNativePC(JNIEnv* jni,
+ const JavaRef<jobject>& j_pc) {
+ return reinterpret_cast<OwnedPeerConnection*>(
+ Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc))
+ ->pc();
+}
+
+PeerConnectionInterface::IceServers JavaToNativeIceServers(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_ice_servers) {
+ PeerConnectionInterface::IceServers ice_servers;
+ for (const JavaRef<jobject>& j_ice_server : Iterable(jni, j_ice_servers)) {
+ ScopedJavaLocalRef<jobject> j_ice_server_tls_cert_policy =
+ Java_IceServer_getTlsCertPolicy(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> urls =
+ Java_IceServer_getUrls(jni, j_ice_server);
+ ScopedJavaLocalRef<jstring> username =
+ Java_IceServer_getUsername(jni, j_ice_server);
+ ScopedJavaLocalRef<jstring> password =
+ Java_IceServer_getPassword(jni, j_ice_server);
+ PeerConnectionInterface::TlsCertPolicy tls_cert_policy =
+ JavaToNativeTlsCertPolicy(jni, j_ice_server_tls_cert_policy);
+ ScopedJavaLocalRef<jstring> hostname =
+ Java_IceServer_getHostname(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> tls_alpn_protocols =
+ Java_IceServer_getTlsAlpnProtocols(jni, j_ice_server);
+ ScopedJavaLocalRef<jobject> tls_elliptic_curves =
+ Java_IceServer_getTlsEllipticCurves(jni, j_ice_server);
+ PeerConnectionInterface::IceServer server;
+ server.urls = JavaListToNativeVector<std::string, jstring>(
+ jni, urls, &JavaToNativeString);
+ server.username = JavaToNativeString(jni, username);
+ server.password = JavaToNativeString(jni, password);
+ server.tls_cert_policy = tls_cert_policy;
+ server.hostname = JavaToNativeString(jni, hostname);
+ server.tls_alpn_protocols = JavaListToNativeVector<std::string, jstring>(
+ jni, tls_alpn_protocols, &JavaToNativeString);
+ server.tls_elliptic_curves = JavaListToNativeVector<std::string, jstring>(
+ jni, tls_elliptic_curves, &JavaToNativeString);
+ ice_servers.push_back(server);
+ }
+ return ice_servers;
+}
+
+SdpSemantics JavaToNativeSdpSemantics(JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp_semantics) {
+ std::string enum_name = GetJavaEnumName(jni, j_sdp_semantics);
+
+ if (enum_name == "PLAN_B")
+ return SdpSemantics::kPlanB_DEPRECATED;
+
+ if (enum_name == "UNIFIED_PLAN")
+ return SdpSemantics::kUnifiedPlan;
+
+ RTC_DCHECK_NOTREACHED();
+ return SdpSemantics::kUnifiedPlan;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaCandidatePairChange(
+ JNIEnv* env,
+ const cricket::CandidatePairChangeEvent& event) {
+ const auto& selected_pair = event.selected_candidate_pair;
+ return Java_CandidatePairChangeEvent_Constructor(
+ env, NativeToJavaCandidate(env, selected_pair.local_candidate()),
+ NativeToJavaCandidate(env, selected_pair.remote_candidate()),
+ static_cast<int>(event.last_data_received_ms),
+ NativeToJavaString(env, event.reason),
+ static_cast<int>(event.estimated_disconnected_time_ms));
+}
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env,
+ int adapterType) {
+ return Java_AdapterType_fromNativeIndex(env, adapterType);
+}
+
+void JavaToNativeRTCConfiguration(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config) {
+ ScopedJavaLocalRef<jobject> j_ice_transports_type =
+ Java_RTCConfiguration_getIceTransportsType(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_bundle_policy =
+ Java_RTCConfiguration_getBundlePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_rtcp_mux_policy =
+ Java_RTCConfiguration_getRtcpMuxPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_rtc_certificate =
+ Java_RTCConfiguration_getCertificate(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_tcp_candidate_policy =
+ Java_RTCConfiguration_getTcpCandidatePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_candidate_network_policy =
+ Java_RTCConfiguration_getCandidateNetworkPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_ice_servers =
+ Java_RTCConfiguration_getIceServers(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_continual_gathering_policy =
+ Java_RTCConfiguration_getContinualGatheringPolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_turn_port_prune_policy =
+ Java_RTCConfiguration_getTurnPortPrunePolicy(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_turn_customizer =
+ Java_RTCConfiguration_getTurnCustomizer(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_network_preference =
+ Java_RTCConfiguration_getNetworkPreference(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_sdp_semantics =
+ Java_RTCConfiguration_getSdpSemantics(jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_crypto_options =
+ Java_RTCConfiguration_getCryptoOptions(jni, j_rtc_config);
+
+ rtc_config->type = JavaToNativeIceTransportsType(jni, j_ice_transports_type);
+ rtc_config->bundle_policy = JavaToNativeBundlePolicy(jni, j_bundle_policy);
+ rtc_config->rtcp_mux_policy =
+ JavaToNativeRtcpMuxPolicy(jni, j_rtcp_mux_policy);
+ if (!j_rtc_certificate.is_null()) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificate::FromPEM(
+ JavaToNativeRTCCertificatePEM(jni, j_rtc_certificate));
+ RTC_CHECK(certificate != nullptr) << "supplied certificate is malformed.";
+ rtc_config->certificates.push_back(certificate);
+ }
+ rtc_config->tcp_candidate_policy =
+ JavaToNativeTcpCandidatePolicy(jni, j_tcp_candidate_policy);
+ rtc_config->candidate_network_policy =
+ JavaToNativeCandidateNetworkPolicy(jni, j_candidate_network_policy);
+ rtc_config->servers = JavaToNativeIceServers(jni, j_ice_servers);
+ rtc_config->audio_jitter_buffer_max_packets =
+ Java_RTCConfiguration_getAudioJitterBufferMaxPackets(jni, j_rtc_config);
+ rtc_config->audio_jitter_buffer_fast_accelerate =
+ Java_RTCConfiguration_getAudioJitterBufferFastAccelerate(jni,
+ j_rtc_config);
+ rtc_config->ice_connection_receiving_timeout =
+ Java_RTCConfiguration_getIceConnectionReceivingTimeout(jni, j_rtc_config);
+ rtc_config->ice_backup_candidate_pair_ping_interval =
+ Java_RTCConfiguration_getIceBackupCandidatePairPingInterval(jni,
+ j_rtc_config);
+ rtc_config->continual_gathering_policy =
+ JavaToNativeContinualGatheringPolicy(jni, j_continual_gathering_policy);
+ rtc_config->ice_candidate_pool_size =
+ Java_RTCConfiguration_getIceCandidatePoolSize(jni, j_rtc_config);
+ rtc_config->prune_turn_ports =
+ Java_RTCConfiguration_getPruneTurnPorts(jni, j_rtc_config);
+ rtc_config->turn_port_prune_policy =
+ JavaToNativePortPrunePolicy(jni, j_turn_port_prune_policy);
+ rtc_config->presume_writable_when_fully_relayed =
+ Java_RTCConfiguration_getPresumeWritableWhenFullyRelayed(jni,
+ j_rtc_config);
+ rtc_config->surface_ice_candidates_on_ice_transport_type_changed =
+ Java_RTCConfiguration_getSurfaceIceCandidatesOnIceTransportTypeChanged(
+ jni, j_rtc_config);
+ ScopedJavaLocalRef<jobject> j_ice_check_interval_strong_connectivity =
+ Java_RTCConfiguration_getIceCheckIntervalStrongConnectivity(jni,
+ j_rtc_config);
+ rtc_config->ice_check_interval_strong_connectivity =
+ JavaToNativeOptionalInt(jni, j_ice_check_interval_strong_connectivity);
+ ScopedJavaLocalRef<jobject> j_ice_check_interval_weak_connectivity =
+ Java_RTCConfiguration_getIceCheckIntervalWeakConnectivity(jni,
+ j_rtc_config);
+ rtc_config->ice_check_interval_weak_connectivity =
+ JavaToNativeOptionalInt(jni, j_ice_check_interval_weak_connectivity);
+ ScopedJavaLocalRef<jobject> j_ice_check_min_interval =
+ Java_RTCConfiguration_getIceCheckMinInterval(jni, j_rtc_config);
+ rtc_config->ice_check_min_interval =
+ JavaToNativeOptionalInt(jni, j_ice_check_min_interval);
+ ScopedJavaLocalRef<jobject> j_ice_unwritable_timeout =
+ Java_RTCConfiguration_getIceUnwritableTimeout(jni, j_rtc_config);
+ rtc_config->ice_unwritable_timeout =
+ JavaToNativeOptionalInt(jni, j_ice_unwritable_timeout);
+ ScopedJavaLocalRef<jobject> j_ice_unwritable_min_checks =
+ Java_RTCConfiguration_getIceUnwritableMinChecks(jni, j_rtc_config);
+ rtc_config->ice_unwritable_min_checks =
+ JavaToNativeOptionalInt(jni, j_ice_unwritable_min_checks);
+ ScopedJavaLocalRef<jobject> j_stun_candidate_keepalive_interval =
+ Java_RTCConfiguration_getStunCandidateKeepaliveInterval(jni,
+ j_rtc_config);
+ rtc_config->stun_candidate_keepalive_interval =
+ JavaToNativeOptionalInt(jni, j_stun_candidate_keepalive_interval);
+ ScopedJavaLocalRef<jobject> j_stable_writable_connection_ping_interval_ms =
+ Java_RTCConfiguration_getStableWritableConnectionPingIntervalMs(
+ jni, j_rtc_config);
+ rtc_config->stable_writable_connection_ping_interval_ms =
+ JavaToNativeOptionalInt(jni,
+ j_stable_writable_connection_ping_interval_ms);
+ rtc_config->disable_ipv6_on_wifi =
+ Java_RTCConfiguration_getDisableIPv6OnWifi(jni, j_rtc_config);
+ rtc_config->max_ipv6_networks =
+ Java_RTCConfiguration_getMaxIPv6Networks(jni, j_rtc_config);
+
+ rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer);
+
+ rtc_config->disable_ipv6 =
+ Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config);
+ rtc_config->media_config.enable_dscp =
+ Java_RTCConfiguration_getEnableDscp(jni, j_rtc_config);
+ rtc_config->media_config.video.enable_cpu_adaptation =
+ Java_RTCConfiguration_getEnableCpuOveruseDetection(jni, j_rtc_config);
+ rtc_config->media_config.video.suspend_below_min_bitrate =
+ Java_RTCConfiguration_getSuspendBelowMinBitrate(jni, j_rtc_config);
+ rtc_config->screencast_min_bitrate = JavaToNativeOptionalInt(
+ jni, Java_RTCConfiguration_getScreencastMinBitrate(jni, j_rtc_config));
+ rtc_config->combined_audio_video_bwe = JavaToNativeOptionalBool(
+ jni, Java_RTCConfiguration_getCombinedAudioVideoBwe(jni, j_rtc_config));
+ rtc_config->network_preference =
+ JavaToNativeNetworkPreference(jni, j_network_preference);
+ rtc_config->sdp_semantics = JavaToNativeSdpSemantics(jni, j_sdp_semantics);
+ rtc_config->active_reset_srtp_params =
+ Java_RTCConfiguration_getActiveResetSrtpParams(jni, j_rtc_config);
+ rtc_config->crypto_options =
+ JavaToNativeOptionalCryptoOptions(jni, j_crypto_options);
+
+ rtc_config->allow_codec_switching = JavaToNativeOptionalBool(
+ jni, Java_RTCConfiguration_getAllowCodecSwitching(jni, j_rtc_config));
+
+ rtc_config->offer_extmap_allow_mixed =
+ Java_RTCConfiguration_getOfferExtmapAllowMixed(jni, j_rtc_config);
+ rtc_config->enable_implicit_rollback =
+ Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config);
+
+ ScopedJavaLocalRef<jstring> j_turn_logging_id =
+ Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config);
+ if (!IsNull(jni, j_turn_logging_id)) {
+ rtc_config->turn_logging_id = JavaToNativeString(jni, j_turn_logging_id);
+ }
+}
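+
+// Illustrative call pattern (a sketch, mirroring the call sites in this file
+// and in peer_connection_factory.cc): start from a default-initialized
+// configuration, then overlay the Java-side fields.
+//
+//   PeerConnectionInterface::RTCConfiguration config(
+//       PeerConnectionInterface::RTCConfigurationType::kAggressive);
+//   JavaToNativeRTCConfiguration(jni, j_rtc_config, &config);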
+
+rtc::KeyType GetRtcConfigKeyType(JNIEnv* env,
+ const JavaRef<jobject>& j_rtc_config) {
+ return JavaToNativeKeyType(
+ env, Java_RTCConfiguration_getKeyType(env, j_rtc_config));
+}
+
+PeerConnectionObserverJni::PeerConnectionObserverJni(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+PeerConnectionObserverJni::~PeerConnectionObserverJni() = default;
+
+void PeerConnectionObserverJni::OnIceCandidate(
+ const IceCandidateInterface* candidate) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceCandidate(env, j_observer_global_,
+ NativeToJavaIceCandidate(env, *candidate));
+}
+
+void PeerConnectionObserverJni::OnIceCandidateError(
+ const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> event = Java_IceCandidateErrorEvent_Constructor(
+ env, NativeToJavaString(env, address), port, NativeToJavaString(env, url),
+ error_code, NativeToJavaString(env, error_text));
+ Java_Observer_onIceCandidateError(env, j_observer_global_, event);
+}
+
+void PeerConnectionObserverJni::OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceCandidatesRemoved(
+ env, j_observer_global_, NativeToJavaCandidateArray(env, candidates));
+}
+
+void PeerConnectionObserverJni::OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onSignalingChange(
+ env, j_observer_global_,
+ Java_SignalingState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceConnectionChange(
+ env, j_observer_global_,
+ Java_IceConnectionState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onStandardizedIceConnectionChange(
+ env, j_observer_global_,
+ Java_IceConnectionState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onConnectionChange(env, j_observer_global_,
+ Java_PeerConnectionState_fromNativeIndex(
+ env, static_cast<int>(new_state)));
+}
+
+void PeerConnectionObserverJni::OnIceConnectionReceivingChange(bool receiving) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceConnectionReceivingChange(env, j_observer_global_,
+ receiving);
+}
+
+void PeerConnectionObserverJni::OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onSelectedCandidatePairChanged(
+ env, j_observer_global_, NativeToJavaCandidatePairChange(env, event));
+}
+
+void PeerConnectionObserverJni::OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onIceGatheringChange(
+ env, j_observer_global_,
+ Java_IceGatheringState_fromNativeIndex(env, new_state));
+}
+
+void PeerConnectionObserverJni::OnAddStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onAddStream(
+ env, j_observer_global_,
+ GetOrCreateJavaStream(env, stream).j_media_stream());
+}
+
+void PeerConnectionObserverJni::OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
+ RTC_CHECK(it != remote_streams_.end())
+ << "unexpected stream: " << stream.get();
+ Java_Observer_onRemoveStream(env, j_observer_global_,
+ it->second.j_media_stream());
+ remote_streams_.erase(it);
+}
+
+void PeerConnectionObserverJni::OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> channel) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onDataChannel(env, j_observer_global_,
+ WrapNativeDataChannel(env, channel));
+}
+
+void PeerConnectionObserverJni::OnRenegotiationNeeded() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onRenegotiationNeeded(env, j_observer_global_);
+}
+
+void PeerConnectionObserverJni::OnAddTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_receiver =
+ NativeToJavaRtpReceiver(env, receiver);
+ rtp_receivers_.emplace_back(env, j_rtp_receiver);
+
+ Java_Observer_onAddTrack(env, j_observer_global_, j_rtp_receiver,
+ NativeToJavaMediaStreamArray(env, streams));
+}
+
+void PeerConnectionObserverJni::OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_receiver =
+ NativeToJavaRtpReceiver(env, receiver);
+ rtp_receivers_.emplace_back(env, j_rtp_receiver);
+
+ Java_Observer_onRemoveTrack(env, j_observer_global_, j_rtp_receiver);
+}
+
+void PeerConnectionObserverJni::OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_rtp_transceiver =
+ NativeToJavaRtpTransceiver(env, transceiver);
+ rtp_transceivers_.emplace_back(env, j_rtp_transceiver);
+
+ Java_Observer_onTrack(env, j_observer_global_, j_rtp_transceiver);
+}
+
+// If the NativeToJavaStreamsMap contains the stream, return it.
+// Otherwise, create a new Java MediaStream.
+JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream(
+ JNIEnv* env,
+ const rtc::scoped_refptr<MediaStreamInterface>& stream) {
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream.get());
+ if (it == remote_streams_.end()) {
+ it = remote_streams_
+ .emplace(std::piecewise_construct,
+ std::forward_as_tuple(stream.get()),
+ std::forward_as_tuple(env, stream))
+ .first;
+ }
+ return it->second;
+}
+
+ScopedJavaLocalRef<jobjectArray>
+PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
+ JNIEnv* jni,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
+ return NativeToJavaObjectArray(
+ jni, streams, GetMediaStreamClass(jni),
+ [this](JNIEnv* env, rtc::scoped_refptr<MediaStreamInterface> stream)
+ -> const ScopedJavaGlobalRef<jobject>& {
+ return GetOrCreateJavaStream(env, stream).j_media_stream();
+ });
+}
+
+OwnedPeerConnection::OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer)
+ : OwnedPeerConnection(peer_connection,
+ std::move(observer),
+ nullptr /* constraints */) {}
+
+OwnedPeerConnection::OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer,
+ std::unique_ptr<MediaConstraints> constraints)
+ : peer_connection_(peer_connection),
+ observer_(std::move(observer)),
+ constraints_(std::move(constraints)) {}
+
+OwnedPeerConnection::~OwnedPeerConnection() {
+ // Ensure that PeerConnection is destroyed before the observer.
+ peer_connection_ = nullptr;
+}
+
+static jlong JNI_PeerConnection_CreatePeerConnectionObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_observer) {
+ return jlongFromPointer(new PeerConnectionObserverJni(jni, j_observer));
+}
+
+static void JNI_PeerConnection_FreeOwnedPeerConnection(JNIEnv*, jlong j_p) {
+ delete reinterpret_cast<OwnedPeerConnection*>(j_p);
+}
+
+static jlong JNI_PeerConnection_GetNativePeerConnection(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return jlongFromPointer(ExtractNativePC(jni, j_pc));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetLocalDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc);
+ // It's only safe to operate on SessionDescriptionInterface on the
+ // signaling thread, but `jni` may only be used on the current thread, so we
+ // must do this odd dance.
+ std::string sdp;
+ std::string type;
+ pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] {
+ const SessionDescriptionInterface* desc = pc->local_description();
+ if (desc) {
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ type = desc->type();
+ }
+ });
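+  // An empty `sdp` means no local description was set; surface that as null.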
+ return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetRemoteDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ PeerConnectionInterface* pc = ExtractNativePC(jni, j_pc);
+ // It's only safe to operate on SessionDescriptionInterface on the
+ // signaling thread, but `jni` may only be used on the current thread, so we
+ // must do this odd dance.
+ std::string sdp;
+ std::string type;
+ pc->signaling_thread()->Invoke<void>(RTC_FROM_HERE, [pc, &sdp, &type] {
+ const SessionDescriptionInterface* desc = pc->remote_description();
+ if (desc) {
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ type = desc->type();
+ }
+ });
+ return sdp.empty() ? nullptr : NativeToJavaSessionDescription(jni, sdp, type);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetCertificate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ const PeerConnectionInterface::RTCConfiguration rtc_config =
+ ExtractNativePC(jni, j_pc)->GetConfiguration();
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc_config.certificates[0];
+ return NativeToJavaRTCCertificatePEM(jni, certificate->ToPEM());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateDataChannel(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_label,
+ const JavaParamRef<jobject>& j_init) {
+ DataChannelInit init = JavaToNativeDataChannelInit(jni, j_init);
+ auto result = ExtractNativePC(jni, j_pc)->CreateDataChannelOrError(
+ JavaToNativeString(jni, j_label), &init);
+ if (!result.ok()) {
+ return WrapNativeDataChannel(jni, nullptr);
+ }
+ return WrapNativeDataChannel(jni, result.MoveValue());
+}
+
+static void JNI_PeerConnection_CreateOffer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
+ ExtractNativePC(jni, j_pc)->CreateOffer(observer.get(), options);
+}
+
+static void JNI_PeerConnection_CreateAnswer(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
+ jni, j_observer, std::move(constraints));
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ CopyConstraintsIntoOfferAnswerOptions(observer->constraints(), &options);
+ ExtractNativePC(jni, j_pc)->CreateAnswer(observer.get(), options);
+}
+
+static void JNI_PeerConnection_SetLocalDescriptionAutomatically(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer) {
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(observer);
+}
+
+static void JNI_PeerConnection_SetLocalDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_sdp) {
+ auto observer =
+ rtc::make_ref_counted<SetLocalSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_SetRemoteDescription(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ const JavaParamRef<jobject>& j_sdp) {
+ auto observer =
+ rtc::make_ref_counted<SetRemoteSdpObserverJni>(jni, j_observer);
+ ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+ JavaToNativeSessionDescription(jni, j_sdp), observer);
+}
+
+static void JNI_PeerConnection_RestartIce(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->RestartIce();
+}
+
+static void JNI_PeerConnection_SetAudioPlayout(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean playout) {
+ ExtractNativePC(jni, j_pc)->SetAudioPlayout(playout);
+}
+
+static void JNI_PeerConnection_SetAudioRecording(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jboolean recording) {
+ ExtractNativePC(jni, j_pc)->SetAudioRecording(recording);
+}
+
+static jboolean JNI_PeerConnection_SetConfiguration(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_rtc_config) {
+  // The constraints, which are stored in the OwnedPeerConnection object, need
+  // to be merged into the RTCConfiguration again.
+ OwnedPeerConnection* owned_pc = reinterpret_cast<OwnedPeerConnection*>(
+ Java_PeerConnection_getNativeOwnedPeerConnection(jni, j_pc));
+ PeerConnectionInterface::RTCConfiguration rtc_config(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
+ if (owned_pc->constraints()) {
+ CopyConstraintsIntoRtcConfiguration(owned_pc->constraints(), &rtc_config);
+ }
+ return owned_pc->pc()->SetConfiguration(rtc_config).ok();
+}
+
+static jboolean JNI_PeerConnection_AddIceCandidate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+ return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+static void JNI_PeerConnection_AddIceCandidateWithObserver(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_sdp_mid,
+ jint j_sdp_mline_index,
+ const JavaParamRef<jstring>& j_candidate_sdp,
+ const JavaParamRef<jobject>& j_observer) {
+ std::string sdp_mid = JavaToNativeString(jni, j_sdp_mid);
+ std::string sdp = JavaToNativeString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
+
+ rtc::scoped_refptr<AddIceCandidateObserverJni> observer(
+ new AddIceCandidateObserverJni(jni, j_observer));
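+  // The lambda below captures `observer` by value, so the scoped_refptr keeps
+  // the wrapper alive until the completion callback has run.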
+ ExtractNativePC(jni, j_pc)->AddIceCandidate(
+ std::move(candidate),
+ [observer](RTCError error) { observer->OnComplete(error); });
+}
+
+static jboolean JNI_PeerConnection_RemoveIceCandidates(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobjectArray>& j_candidates) {
+ std::vector<cricket::Candidate> candidates =
+ JavaToNativeVector<cricket::Candidate>(jni, j_candidates,
+ &JavaToNativeCandidate);
+ return ExtractNativePC(jni, j_pc)->RemoveIceCandidates(candidates);
+}
+
+static jboolean JNI_PeerConnection_AddLocalStream(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_stream) {
+ return ExtractNativePC(jni, j_pc)->AddStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+static void JNI_PeerConnection_RemoveLocalStream(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_stream) {
+ ExtractNativePC(jni, j_pc)->RemoveStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_CreateSender(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jstring>& j_kind,
+ const JavaParamRef<jstring>& j_stream_id) {
+ std::string kind = JavaToNativeString(jni, j_kind);
+ std::string stream_id = JavaToNativeString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ return NativeToJavaRtpSender(jni, sender);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetSenders(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetSenders(),
+ &NativeToJavaRtpSender);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetReceivers(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetReceivers(),
+ &NativeToJavaRtpReceiver);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_GetTransceivers(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ return NativeToJavaList(jni, ExtractNativePC(jni, j_pc)->GetTransceivers(),
+ &NativeToJavaRtpTransceiver);
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const jlong native_track,
+ const JavaParamRef<jobject>& j_stream_labels) {
+ RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTrack(
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
+ JavaListToNativeVector<std::string, jstring>(jni, j_stream_labels,
+ &JavaToNativeString));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add track: " << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpSender(jni, result.MoveValue());
+ }
+}
+
+static jboolean JNI_PeerConnection_RemoveTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_sender) {
+ return ExtractNativePC(jni, j_pc)
+ ->RemoveTrackOrError(rtc::scoped_refptr<RtpSenderInterface>(
+ reinterpret_cast<RtpSenderInterface*>(native_sender)))
+ .ok();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverWithTrack(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ jlong native_track,
+ const JavaParamRef<jobject>& j_init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTransceiver(
+ rtc::scoped_refptr<MediaStreamTrackInterface>(
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track)),
+ JavaToNativeRtpTransceiverInit(jni, j_init));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
+ << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpTransceiver(jni, result.MoveValue());
+ }
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_AddTransceiverOfType(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_media_type,
+ const JavaParamRef<jobject>& j_init) {
+ RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>> result =
+ ExtractNativePC(jni, j_pc)->AddTransceiver(
+ JavaToNativeMediaType(jni, j_media_type),
+ JavaToNativeRtpTransceiverInit(jni, j_init));
+ if (!result.ok()) {
+ RTC_LOG(LS_ERROR) << "Failed to add transceiver: "
+ << result.error().message();
+ return nullptr;
+ } else {
+ return NativeToJavaRtpTransceiver(jni, result.MoveValue());
+ }
+}
+
+static jboolean JNI_PeerConnection_OldGetStats(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_observer,
+ jlong native_track) {
+ auto observer = rtc::make_ref_counted<StatsObserverJni>(jni, j_observer);
+ return ExtractNativePC(jni, j_pc)->GetStats(
+ observer.get(),
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+static void JNI_PeerConnection_NewGetStats(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_callback) {
+ auto callback =
+ rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(jni, j_callback);
+ ExtractNativePC(jni, j_pc)->GetStats(callback.get());
+}
+
+static jboolean JNI_PeerConnection_SetBitrate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ const JavaParamRef<jobject>& j_min,
+ const JavaParamRef<jobject>& j_current,
+ const JavaParamRef<jobject>& j_max) {
+ BitrateSettings params;
+ params.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min);
+ params.start_bitrate_bps = JavaToNativeOptionalInt(jni, j_current);
+ params.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max);
+ return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok();
+}
+
+static jboolean JNI_PeerConnection_StartRtcEventLog(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc,
+ int file_descriptor,
+ int max_size_bytes) {
+ // TODO(eladalon): It would be better to not allow negative values into PC.
+ const size_t max_size = (max_size_bytes < 0)
+ ? RtcEventLog::kUnlimitedOutput
+ : rtc::saturated_cast<size_t>(max_size_bytes);
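+  // fdopen() takes over the descriptor on success; on failure we close it
+  // ourselves so the caller's fd is not leaked. The RtcEventLogOutputFile is
+  // then expected to close the FILE* when logging stops.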
+ FILE* f = fdopen(file_descriptor, "wb");
+ if (!f) {
+ close(file_descriptor);
+ return false;
+ }
+ return ExtractNativePC(jni, j_pc)->StartRtcEventLog(
+ std::make_unique<RtcEventLogOutputFile>(f, max_size));
+}
+
+static void JNI_PeerConnection_StopRtcEventLog(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->StopRtcEventLog();
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_SignalingState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_SignalingState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->signaling_state());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceConnectionState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_IceConnectionState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->ice_connection_state());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_ConnectionState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_PeerConnectionState_fromNativeIndex(
+ env,
+ static_cast<int>(ExtractNativePC(env, j_pc)->peer_connection_state()));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_PeerConnection_IceGatheringState(
+ JNIEnv* env,
+ const JavaParamRef<jobject>& j_pc) {
+ return Java_IceGatheringState_fromNativeIndex(
+ env, ExtractNativePC(env, j_pc)->ice_gathering_state());
+}
+
+static void JNI_PeerConnection_Close(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_pc) {
+ ExtractNativePC(jni, j_pc)->Close();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h
new file mode 100644
index 0000000000..9976e8e4f5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection.h
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
+#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "api/peer_connection_interface.h"
+#include "pc/media_stream_observer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_constraints.h"
+#include "sdk/android/src/jni/pc/media_stream.h"
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+#include "sdk/android/src/jni/pc/rtp_transceiver.h"
+
+namespace webrtc {
+namespace jni {
+
+void JavaToNativeRTCConfiguration(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config);
+
+rtc::KeyType GetRtcConfigKeyType(JNIEnv* env,
+ const JavaRef<jobject>& j_rtc_config);
+
+ScopedJavaLocalRef<jobject> NativeToJavaAdapterType(JNIEnv* env,
+ int adapterType);
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface. Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PeerConnectionObserverJni : public PeerConnectionObserver {
+ public:
+ PeerConnectionObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~PeerConnectionObserverJni() override;
+
+ // Implementation of PeerConnectionObserver interface, which propagates
+ // the callbacks to the Java observer.
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+ void OnIceCandidateError(const std::string& address,
+ int port,
+ const std::string& url,
+ int error_code,
+ const std::string& error_text) override;
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override;
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override;
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnStandardizedIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override;
+ void OnConnectionChange(
+ PeerConnectionInterface::PeerConnectionState new_state) override;
+ void OnIceConnectionReceivingChange(bool receiving) override;
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override;
+ void OnIceSelectedCandidatePairChanged(
+ const cricket::CandidatePairChangeEvent& event) override;
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
+ void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override;
+ void OnRenegotiationNeeded() override;
+ void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
+ streams) override;
+ void OnTrack(
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
+ void OnRemoveTrack(
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
+
+ private:
+ typedef std::map<MediaStreamInterface*, JavaMediaStream>
+ NativeToJavaStreamsMap;
+ typedef std::map<MediaStreamTrackInterface*, RtpReceiverInterface*>
+ NativeMediaStreamTrackToNativeRtpReceiver;
+
+ // If the NativeToJavaStreamsMap contains the stream, return it.
+ // Otherwise, create a new Java MediaStream. Returns a global jobject.
+ JavaMediaStream& GetOrCreateJavaStream(
+ JNIEnv* env,
+ const rtc::scoped_refptr<MediaStreamInterface>& stream);
+
+ // Converts array of streams, creating or re-using Java streams as necessary.
+ ScopedJavaLocalRef<jobjectArray> NativeToJavaMediaStreamArray(
+ JNIEnv* jni,
+ const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
+
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+
+ // C++ -> Java remote streams.
+ NativeToJavaStreamsMap remote_streams_;
+ std::vector<JavaRtpReceiverGlobalOwner> rtp_receivers_;
+ // Holds a reference to the Java transceivers given to the AddTrack
+ // callback, so that the shared ownership by the Java object will be
+ // properly disposed.
+ std::vector<JavaRtpTransceiverGlobalOwner> rtp_transceivers_;
+};
+
+// PeerConnection doesn't take ownership of the observer. In the Java API, we
+// don't want the client to have to manually dispose of the observer. To solve
+// this, this wrapper class owns both objects and ties their lifetimes together.
+//
+// Also stores a reference to the deprecated PeerConnection constraints for now.
+class OwnedPeerConnection {
+ public:
+ OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer);
+ // Deprecated. PC constraints are deprecated.
+ OwnedPeerConnection(
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection,
+ std::unique_ptr<PeerConnectionObserver> observer,
+ std::unique_ptr<MediaConstraints> constraints);
+ ~OwnedPeerConnection();
+
+ PeerConnectionInterface* pc() const { return peer_connection_.get(); }
+ const MediaConstraints* constraints() const { return constraints_.get(); }
+
+ private:
+ rtc::scoped_refptr<PeerConnectionInterface> peer_connection_;
+ std::unique_ptr<PeerConnectionObserver> observer_;
+ std::unique_ptr<MediaConstraints> constraints_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
new file mode 100644
index 0000000000..fafcad3caf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.cc
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/peer_connection_factory.h"
+
+#include <memory>
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/base/media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/utility/include/jvm_android.h"
+// We don't depend on the audio processing module implementation.
+// The user may pass in a nullptr.
+#include "api/call/call_factory_interface.h"
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "rtc_base/event_tracer.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/generated_peerconnection_jni/PeerConnectionFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/native_api/stacktrace/stacktrace.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/logging/log_sink.h"
+#include "sdk/android/src/jni/pc/android_network_monitor.h"
+#include "sdk/android/src/jni/pc/audio.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+#include "sdk/android/src/jni/pc/owned_factory_and_threads.h"
+#include "sdk/android/src/jni/pc/peer_connection.h"
+#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h"
+#include "sdk/android/src/jni/pc/video.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Takes ownership of the pointer stored in the jlong and wraps it in an
+// rtc::scoped_refptr.
+template <typename T>
+rtc::scoped_refptr<T> TakeOwnershipOfRefPtr(jlong j_pointer) {
+ T* ptr = reinterpret_cast<T*>(j_pointer);
+ rtc::scoped_refptr<T> refptr;
+ refptr.swap(&ptr);
+ return refptr;
+}
+
+// Takes ownership of the pointer stored in the jlong and wraps it in a
+// std::unique_ptr.
+template <typename T>
+std::unique_ptr<T> TakeOwnershipOfUniquePtr(jlong native_pointer) {
+ return std::unique_ptr<T>(reinterpret_cast<T*>(native_pointer));
+}
+
+typedef void (*JavaMethodPointer)(JNIEnv*, const JavaRef<jobject>&);
+
+// Post a message on the given thread that will call the Java method on the
+// given Java object.
+void PostJavaCallback(JNIEnv* env,
+ rtc::Thread* queue,
+ const rtc::Location& posted_from,
+ const JavaRef<jobject>& j_object,
+ JavaMethodPointer java_method_pointer) {
+ // One-off message handler that calls the Java method on the specified Java
+ // object before deleting itself.
+ class JavaAsyncCallback : public rtc::MessageHandler {
+ public:
+ JavaAsyncCallback(JNIEnv* env,
+ const JavaRef<jobject>& j_object,
+ JavaMethodPointer java_method_pointer)
+ : j_object_(env, j_object), java_method_pointer_(java_method_pointer) {}
+
+ void OnMessage(rtc::Message*) override {
+ java_method_pointer_(AttachCurrentThreadIfNeeded(), j_object_);
+      // The message has been delivered; clean up after ourselves.
+ delete this;
+ }
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_object_;
+ JavaMethodPointer java_method_pointer_;
+ };
+
+ queue->Post(posted_from,
+ new JavaAsyncCallback(env, j_object, java_method_pointer));
+}
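+
+// Illustrative usage (a sketch; NativeToScopedJavaPeerConnectionFactory below
+// contains the real call sites):
+//
+//   PostJavaCallback(env, owned_factory->worker_thread(), RTC_FROM_HERE,
+//                    j_pcf, &Java_PeerConnectionFactory_onWorkerThreadReady);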
+
+absl::optional<PeerConnectionFactoryInterface::Options>
+JavaToNativePeerConnectionFactoryOptions(JNIEnv* jni,
+ const JavaRef<jobject>& j_options) {
+ if (j_options.is_null())
+ return absl::nullopt;
+
+ PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+ // to add more parameters as necessary.
+ native_options.network_ignore_mask =
+ Java_Options_getNetworkIgnoreMask(jni, j_options);
+ native_options.disable_encryption =
+ Java_Options_getDisableEncryption(jni, j_options);
+ native_options.disable_network_monitor =
+ Java_Options_getDisableNetworkMonitor(jni, j_options);
+
+ return native_options;
+}
+
+// Place static objects into a container that is intentionally leaked so we
+// avoid running a non-trivial destructor at program exit.
+struct StaticObjectContainer {
+  // Field trials initialization string. InitFieldTrialsFromString() keeps a
+  // pointer into it, so the string must stay alive for the process lifetime.
+ std::unique_ptr<std::string> field_trials_init_string;
+ // Set in PeerConnectionFactory_InjectLoggable().
+ std::unique_ptr<JNILogSink> jni_log_sink;
+};
+
+StaticObjectContainer& GetStaticObjects() {
+ static StaticObjectContainer* static_objects = new StaticObjectContainer();
+ return *static_objects;
+}
+
+ScopedJavaLocalRef<jobject> NativeToScopedJavaPeerConnectionFactory(
+ JNIEnv* env,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread) {
+ OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+ std::move(socket_factory), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread), pcf);
+
+ ScopedJavaLocalRef<jobject> j_pcf = Java_PeerConnectionFactory_Constructor(
+ env, NativeToJavaPointer(owned_factory));
+
+ PostJavaCallback(env, owned_factory->network_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onNetworkThreadReady);
+ PostJavaCallback(env, owned_factory->worker_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onWorkerThreadReady);
+ PostJavaCallback(env, owned_factory->signaling_thread(), RTC_FROM_HERE, j_pcf,
+ &Java_PeerConnectionFactory_onSignalingThreadReady);
+
+ return j_pcf;
+}
+
+PeerConnectionFactoryInterface* PeerConnectionFactoryFromJava(jlong j_p) {
+ return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+} // namespace
+
+// Note: Some of the video-specific PeerConnectionFactory methods are
+// implemented in "video.cc". This is done so that if an application
+// doesn't need video support, it can just link with "null_video.cc"
+// instead of "video.cc", which doesn't bring in the video-specific
+// dependencies.
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread) {
+ return NativeToScopedJavaPeerConnectionFactory(
+ jni, pcf, std::move(socket_factory), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread))
+ .Release();
+}
+
+static void JNI_PeerConnectionFactory_InitializeAndroidGlobals(JNIEnv* jni) {
+ if (!factory_static_initialized) {
+ JVM::Initialize(GetJVM());
+ factory_static_initialized = true;
+ }
+}
+
+static void JNI_PeerConnectionFactory_InitializeFieldTrials(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_trials_init_string) {
+ std::unique_ptr<std::string>& field_trials_init_string =
+ GetStaticObjects().field_trials_init_string;
+
+ if (j_trials_init_string.is_null()) {
+ field_trials_init_string = nullptr;
+ field_trial::InitFieldTrialsFromString(nullptr);
+ return;
+ }
+ field_trials_init_string = std::make_unique<std::string>(
+ JavaToNativeString(jni, j_trials_init_string));
+ RTC_LOG(LS_INFO) << "initializeFieldTrials: " << *field_trials_init_string;
+ field_trial::InitFieldTrialsFromString(field_trials_init_string->c_str());
+}
+
+static void JNI_PeerConnectionFactory_InitializeInternalTracer(JNIEnv* jni) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+static ScopedJavaLocalRef<jstring>
+JNI_PeerConnectionFactory_FindFieldTrialsFullName(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_name) {
+ return NativeToJavaString(
+ jni, field_trial::FindFullName(JavaToStdString(jni, j_name)));
+}
+
+static jboolean JNI_PeerConnectionFactory_StartInternalTracingCapture(
+ JNIEnv* jni,
+ const JavaParamRef<jstring>& j_event_tracing_filename) {
+ if (j_event_tracing_filename.is_null())
+ return false;
+
+ const char* init_string =
+      jni->GetStringUTFChars(j_event_tracing_filename.obj(), nullptr);
+ RTC_LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename.obj(), init_string);
+ return ret;
+}
+
+static void JNI_PeerConnectionFactory_StopInternalTracingCapture(JNIEnv* jni) {
+ rtc::tracing::StopInternalCapture();
+}
+
+static void JNI_PeerConnectionFactory_ShutdownInternalTracer(JNIEnv* jni) {
+ rtc::tracing::ShutdownInternalTracer();
+}
+
+// The following parameters are optional:
+// `audio_device_module`, `jencoder_factory`, `jdecoder_factory`,
+// `audio_processor`, `fec_controller_factory`,
+// `network_state_predictor_factory`, `neteq_factory`.
+ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& jcontext,
+ const JavaParamRef<jobject>& joptions,
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
+ rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
+ rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
+ const JavaParamRef<jobject>& jencoder_factory,
+ const JavaParamRef<jobject>& jdecoder_factory,
+ rtc::scoped_refptr<AudioProcessing> audio_processor,
+ std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
+ std::unique_ptr<NetworkControllerFactoryInterface>
+ network_controller_factory,
+ std::unique_ptr<NetworkStatePredictorFactoryInterface>
+ network_state_predictor_factory,
+ std::unique_ptr<NetEqFactory> neteq_factory) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
+ // think about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+ auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+ network_thread->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+ worker_thread->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+  signaling_thread->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+ const absl::optional<PeerConnectionFactoryInterface::Options> options =
+ JavaToNativePeerConnectionFactoryOptions(jni, joptions);
+
+ PeerConnectionFactoryDependencies dependencies;
+ // TODO(bugs.webrtc.org/13145): Also add socket_server.get() to the
+ // dependencies.
+ dependencies.network_thread = network_thread.get();
+ dependencies.worker_thread = worker_thread.get();
+ dependencies.signaling_thread = signaling_thread.get();
+ dependencies.task_queue_factory = CreateDefaultTaskQueueFactory();
+ dependencies.call_factory = CreateCallFactory();
+ dependencies.event_log_factory = std::make_unique<RtcEventLogFactory>(
+ dependencies.task_queue_factory.get());
+ dependencies.fec_controller_factory = std::move(fec_controller_factory);
+ dependencies.network_controller_factory =
+ std::move(network_controller_factory);
+ dependencies.network_state_predictor_factory =
+ std::move(network_state_predictor_factory);
+ dependencies.neteq_factory = std::move(neteq_factory);
+ if (!(options && options->disable_network_monitor)) {
+ dependencies.network_monitor_factory =
+ std::make_unique<AndroidNetworkMonitorFactory>();
+ }
+
+ cricket::MediaEngineDependencies media_dependencies;
+ media_dependencies.task_queue_factory = dependencies.task_queue_factory.get();
+ media_dependencies.adm = std::move(audio_device_module);
+ media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory);
+ media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory);
+ media_dependencies.audio_processing = std::move(audio_processor);
+ media_dependencies.video_encoder_factory =
+ absl::WrapUnique(CreateVideoEncoderFactory(jni, jencoder_factory));
+ media_dependencies.video_decoder_factory =
+ absl::WrapUnique(CreateVideoDecoderFactory(jni, jdecoder_factory));
+ dependencies.media_engine =
+ cricket::CreateMediaEngine(std::move(media_dependencies));
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory =
+ CreateModularPeerConnectionFactory(std::move(dependencies));
+
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ "WebRTC/libjingle init likely failed on this device";
+ // TODO(honghaiz): Maybe put the options as the argument of
+ // CreatePeerConnectionFactory.
+ if (options)
+ factory->SetOptions(*options);
+
+ return NativeToScopedJavaPeerConnectionFactory(
+ jni, factory, std::move(socket_server), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread));
+}
+
+static ScopedJavaLocalRef<jobject>
+JNI_PeerConnectionFactory_CreatePeerConnectionFactory(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& jcontext,
+ const JavaParamRef<jobject>& joptions,
+ jlong native_audio_device_module,
+ jlong native_audio_encoder_factory,
+ jlong native_audio_decoder_factory,
+ const JavaParamRef<jobject>& jencoder_factory,
+ const JavaParamRef<jobject>& jdecoder_factory,
+ jlong native_audio_processor,
+ jlong native_fec_controller_factory,
+ jlong native_network_controller_factory,
+ jlong native_network_state_predictor_factory,
+ jlong native_neteq_factory) {
+ rtc::scoped_refptr<AudioProcessing> audio_processor(
+ reinterpret_cast<AudioProcessing*>(native_audio_processor));
+ return CreatePeerConnectionFactoryForJava(
+ jni, jcontext, joptions,
+ rtc::scoped_refptr<AudioDeviceModule>(
+ reinterpret_cast<AudioDeviceModule*>(native_audio_device_module)),
+ TakeOwnershipOfRefPtr<AudioEncoderFactory>(native_audio_encoder_factory),
+ TakeOwnershipOfRefPtr<AudioDecoderFactory>(native_audio_decoder_factory),
+ jencoder_factory, jdecoder_factory,
+ audio_processor ? audio_processor : CreateAudioProcessing(),
+ TakeOwnershipOfUniquePtr<FecControllerFactoryInterface>(
+ native_fec_controller_factory),
+ TakeOwnershipOfUniquePtr<NetworkControllerFactoryInterface>(
+ native_network_controller_factory),
+ TakeOwnershipOfUniquePtr<NetworkStatePredictorFactoryInterface>(
+ native_network_state_predictor_factory),
+ TakeOwnershipOfUniquePtr<NetEqFactory>(native_neteq_factory));
+}
+
+static void JNI_PeerConnectionFactory_FreeFactory(JNIEnv*,
+ jlong j_p) {
+ delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ field_trial::InitFieldTrialsFromString(nullptr);
+ GetStaticObjects().field_trials_init_string = nullptr;
+}
+
+static jlong JNI_PeerConnectionFactory_CreateLocalMediaStream(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& label) {
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateLocalMediaStream(JavaToStdString(jni, label)));
+ return jlongFromPointer(stream.release());
+}
+
+static jlong JNI_PeerConnectionFactory_CreateAudioSource(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jobject>& j_constraints) {
+ std::unique_ptr<MediaConstraints> constraints =
+ JavaToNativeMediaConstraints(jni, j_constraints);
+ cricket::AudioOptions options;
+ CopyConstraintsIntoAudioOptions(constraints.get(), &options);
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateAudioSource(options));
+ return jlongFromPointer(source.release());
+}
+
+jlong JNI_PeerConnectionFactory_CreateAudioTrack(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& id,
+ jlong native_source) {
+ rtc::scoped_refptr<AudioTrackInterface> track(
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateAudioTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<AudioSourceInterface*>(native_source)));
+ return jlongFromPointer(track.release());
+}
+
+static jboolean JNI_PeerConnectionFactory_StartAecDump(
+ JNIEnv* jni,
+ jlong native_factory,
+ jint file_descriptor,
+ jint filesize_limit_bytes) {
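+  // fdopen() adopts the descriptor on success; close it manually only on
+  // failure so the caller's fd is not leaked.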
+ FILE* f = fdopen(file_descriptor, "wb");
+ if (!f) {
+ close(file_descriptor);
+ return false;
+ }
+
+ return PeerConnectionFactoryFromJava(native_factory)
+ ->StartAecDump(f, filesize_limit_bytes);
+}
+
+static void JNI_PeerConnectionFactory_StopAecDump(JNIEnv* jni,
+ jlong native_factory) {
+ PeerConnectionFactoryFromJava(native_factory)->StopAecDump();
+}
+
+static jlong JNI_PeerConnectionFactory_CreatePeerConnection(
+ JNIEnv* jni,
+ jlong factory,
+ const JavaParamRef<jobject>& j_rtc_config,
+ const JavaParamRef<jobject>& j_constraints,
+ jlong observer_p,
+ const JavaParamRef<jobject>& j_sslCertificateVerifier) {
+ std::unique_ptr<PeerConnectionObserver> observer(
+ reinterpret_cast<PeerConnectionObserver*>(observer_p));
+
+ PeerConnectionInterface::RTCConfiguration rtc_config(
+ PeerConnectionInterface::RTCConfigurationType::kAggressive);
+ JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+ if (rtc_config.certificates.empty()) {
+ // Generate non-default certificate.
+ rtc::KeyType key_type = GetRtcConfigKeyType(jni, j_rtc_config);
+ if (key_type != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(
+ rtc::KeyParams(key_type), absl::nullopt);
+ if (!certificate) {
+ RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
+ << key_type;
+ return 0;
+ }
+ rtc_config.certificates.push_back(certificate);
+ }
+ }
+
+ std::unique_ptr<MediaConstraints> constraints;
+ if (!j_constraints.is_null()) {
+ constraints = JavaToNativeMediaConstraints(jni, j_constraints);
+ CopyConstraintsIntoRtcConfiguration(constraints.get(), &rtc_config);
+ }
+
+ PeerConnectionDependencies peer_connection_dependencies(observer.get());
+ if (!j_sslCertificateVerifier.is_null()) {
+ peer_connection_dependencies.tls_cert_verifier =
+ std::make_unique<SSLCertificateVerifierWrapper>(
+ jni, j_sslCertificateVerifier);
+ }
+
+ auto result =
+ PeerConnectionFactoryFromJava(factory)->CreatePeerConnectionOrError(
+ rtc_config, std::move(peer_connection_dependencies));
+ if (!result.ok())
+ return 0;
+
+ return jlongFromPointer(new OwnedPeerConnection(
+ result.MoveValue(), std::move(observer), std::move(constraints)));
+}
+
+static jlong JNI_PeerConnectionFactory_CreateVideoSource(
+ JNIEnv* jni,
+ jlong native_factory,
+ jboolean is_screencast,
+ jboolean align_timestamps) {
+ OwnedFactoryAndThreads* factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+ return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(),
+ factory->worker_thread(),
+ is_screencast, align_timestamps));
+}
+
+static jlong JNI_PeerConnectionFactory_CreateVideoTrack(
+ JNIEnv* jni,
+ jlong native_factory,
+ const JavaParamRef<jstring>& id,
+ jlong native_source) {
+ rtc::scoped_refptr<VideoTrackInterface> track =
+ PeerConnectionFactoryFromJava(native_factory)
+ ->CreateVideoTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<VideoTrackSourceInterface*>(native_source));
+ return jlongFromPointer(track.release());
+}
+
+static jlong JNI_PeerConnectionFactory_GetNativePeerConnectionFactory(
+ JNIEnv* jni,
+ jlong native_factory) {
+ return jlongFromPointer(PeerConnectionFactoryFromJava(native_factory));
+}
+
+static void JNI_PeerConnectionFactory_InjectLoggable(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_logging,
+ jint nativeSeverity) {
+ std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink;
+
+ // If there is already a LogSink, remove it from LogMessage.
+ if (jni_log_sink) {
+ rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+ }
+ jni_log_sink = std::make_unique<JNILogSink>(jni, j_logging);
+ rtc::LogMessage::AddLogToStream(
+ jni_log_sink.get(), static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ rtc::LogMessage::LogToDebug(rtc::LS_NONE);
+}
+
+static void JNI_PeerConnectionFactory_DeleteLoggable(JNIEnv* jni) {
+ std::unique_ptr<JNILogSink>& jni_log_sink = GetStaticObjects().jni_log_sink;
+
+ if (jni_log_sink) {
+ rtc::LogMessage::RemoveLogToStream(jni_log_sink.get());
+ jni_log_sink.reset();
+ }
+}
+
+static void JNI_PeerConnectionFactory_PrintStackTrace(JNIEnv* env, jint tid) {
+ RTC_LOG(LS_WARNING) << StackTraceToString(GetStackTrace(tid));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h
new file mode 100644
index 0000000000..b5d5e5dcb7
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/peer_connection_factory.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+#define SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
+
+#include <jni.h>
+#include "api/peer_connection_interface.h"
+#include "rtc_base/thread.h"
+
+namespace webrtc {
+namespace jni {
+
+// Creates a Java PeerConnectionFactory that wraps the specified `pcf`.
+jobject NativeToJavaPeerConnectionFactory(
+ JNIEnv* jni,
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pcf,
+ std::unique_ptr<rtc::SocketFactory> socket_factory,
+ std::unique_ptr<rtc::Thread> network_thread,
+ std::unique_ptr<rtc::Thread> worker_thread,
+ std::unique_ptr<rtc::Thread> signaling_thread);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_PEER_CONNECTION_FACTORY_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
new file mode 100644
index 0000000000..f305324ac8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtc_certificate.h"
+#include "sdk/android/src/jni/pc/ice_candidate.h"
+
+#include "rtc_base/ref_count.h"
+#include "rtc_base/rtc_certificate.h"
+#include "rtc_base/rtc_certificate_generator.h"
+#include "sdk/android/generated_peerconnection_jni/RtcCertificatePem_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_certificate) {
+ ScopedJavaLocalRef<jstring> privatekey_field =
+ Java_RtcCertificatePem_getPrivateKey(jni, j_rtc_certificate);
+ ScopedJavaLocalRef<jstring> certificate_field =
+ Java_RtcCertificatePem_getCertificate(jni, j_rtc_certificate);
+ return rtc::RTCCertificatePEM(JavaToNativeString(jni, privatekey_field),
+ JavaToNativeString(jni, certificate_field));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM(
+ JNIEnv* jni,
+ const rtc::RTCCertificatePEM& certificate) {
+ return Java_RtcCertificatePem_Constructor(
+ jni, NativeToJavaString(jni, certificate.private_key()),
+ NativeToJavaString(jni, certificate.certificate()));
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtcCertificatePem_GenerateCertificate(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_key_type,
+ jlong j_expires) {
+ rtc::KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
+  uint64_t expires = static_cast<uint64_t>(j_expires);
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(
+ rtc::KeyParams(key_type), expires);
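+  // Note: GenerateCertificate() can return null on failure; this binding
+  // assumes generation succeeds.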
+ rtc::RTCCertificatePEM pem = certificate->ToPEM();
+ return Java_RtcCertificatePem_Constructor(
+ jni, NativeToJavaString(jni, pem.private_key()),
+ NativeToJavaString(jni, pem.certificate()));
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h
new file mode 100644
index 0000000000..91a413cd37
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_certificate.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
+
+#include "rtc_base/ref_count.h"
+#include "rtc_base/rtc_certificate.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::RTCCertificatePEM JavaToNativeRTCCertificatePEM(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_rtc_certificate);
+
+ScopedJavaLocalRef<jobject> NativeToJavaRTCCertificatePEM(
+ JNIEnv* env,
+ const rtc::RTCCertificatePEM& certificate);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTC_CERTIFICATE_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
new file mode 100644
index 0000000000..b8eae739f9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.cc
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h"
+
+#include <string>
+#include <vector>
+
+#include "rtc_base/string_encode.h"
+#include "sdk/android/generated_external_classes_jni/BigInteger_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStatsCollectorCallback_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStatsReport_jni.h"
+#include "sdk/android/generated_peerconnection_jni/RTCStats_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaBigInteger(JNIEnv* env, uint64_t u) {
+ return JNI_BigInteger::Java_BigInteger_ConstructorJMBI_JLS(
+ env, NativeToJavaString(env, rtc::ToString(u)));
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaBigIntegerArray(
+ JNIEnv* env,
+ const std::vector<uint64_t>& container) {
+ return NativeToJavaObjectArray(
+ env, container, java_math_BigInteger_clazz(env), &NativeToJavaBigInteger);
+}
+
+ScopedJavaLocalRef<jobject> MemberToJava(
+ JNIEnv* env,
+ const RTCStatsMemberInterface& member) {
+ switch (member.type()) {
+ case RTCStatsMemberInterface::kBool:
+ return NativeToJavaBoolean(env, *member.cast_to<RTCStatsMember<bool>>());
+
+ case RTCStatsMemberInterface::kInt32:
+ return NativeToJavaInteger(env,
+ *member.cast_to<RTCStatsMember<int32_t>>());
+
+ case RTCStatsMemberInterface::kUint32:
+ return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<uint32_t>>());
+
+ case RTCStatsMemberInterface::kInt64:
+ return NativeToJavaLong(env, *member.cast_to<RTCStatsMember<int64_t>>());
+
+ case RTCStatsMemberInterface::kUint64:
+ return NativeToJavaBigInteger(
+ env, *member.cast_to<RTCStatsMember<uint64_t>>());
+
+ case RTCStatsMemberInterface::kDouble:
+ return NativeToJavaDouble(env, *member.cast_to<RTCStatsMember<double>>());
+
+ case RTCStatsMemberInterface::kString:
+ return NativeToJavaString(env,
+ *member.cast_to<RTCStatsMember<std::string>>());
+
+ case RTCStatsMemberInterface::kSequenceBool:
+ return NativeToJavaBooleanArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<bool>>>());
+
+ case RTCStatsMemberInterface::kSequenceInt32:
+ return NativeToJavaIntegerArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<int32_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceUint32: {
+ const std::vector<uint32_t>& v =
+ *member.cast_to<RTCStatsMember<std::vector<uint32_t>>>();
+ return NativeToJavaLongArray(env,
+ std::vector<int64_t>(v.begin(), v.end()));
+ }
+ case RTCStatsMemberInterface::kSequenceInt64:
+ return NativeToJavaLongArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<int64_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceUint64:
+ return NativeToJavaBigIntegerArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<uint64_t>>>());
+
+ case RTCStatsMemberInterface::kSequenceDouble:
+ return NativeToJavaDoubleArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<double>>>());
+
+ case RTCStatsMemberInterface::kSequenceString:
+ return NativeToJavaStringArray(
+ env, *member.cast_to<RTCStatsMember<std::vector<std::string>>>());
+
+ case RTCStatsMemberInterface::kMapStringUint64:
+ return NativeToJavaMap(
+ env,
+ *member.cast_to<RTCStatsMember<std::map<std::string, uint64_t>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaBigInteger(env, entry.second));
+ });
+
+ case RTCStatsMemberInterface::kMapStringDouble:
+ return NativeToJavaMap(
+ env, *member.cast_to<RTCStatsMember<std::map<std::string, double>>>(),
+ [](JNIEnv* env, const auto& entry) {
+ return std::make_pair(NativeToJavaString(env, entry.first),
+ NativeToJavaDouble(env, entry.second));
+ });
+ }
+ RTC_DCHECK_NOTREACHED();
+ return nullptr;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStats(JNIEnv* env,
+ const RTCStats& stats) {
+ JavaMapBuilder builder(env);
+ for (auto* const member : stats.Members()) {
+ if (!member->is_defined())
+ continue;
+ builder.put(NativeToJavaString(env, member->name()),
+ MemberToJava(env, *member));
+ }
+ return Java_RTCStats_create(
+ env, stats.timestamp_us(), NativeToJavaString(env, stats.type()),
+ NativeToJavaString(env, stats.id()), builder.GetJavaMap());
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtcStatsReport(
+ JNIEnv* env,
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ ScopedJavaLocalRef<jobject> j_stats_map =
+ NativeToJavaMap(env, *report, [](JNIEnv* env, const RTCStats& stats) {
+ return std::make_pair(NativeToJavaString(env, stats.id()),
+ NativeToJavaRtcStats(env, stats));
+ });
+ return Java_RTCStatsReport_create(env, report->timestamp_us(), j_stats_map);
+}
+
+} // namespace
+
+RTCStatsCollectorCallbackWrapper::RTCStatsCollectorCallbackWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_callback)
+ : j_callback_global_(jni, j_callback) {}
+
+RTCStatsCollectorCallbackWrapper::~RTCStatsCollectorCallbackWrapper() = default;
+
+void RTCStatsCollectorCallbackWrapper::OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_RTCStatsCollectorCallback_onStatsDelivered(
+ jni, j_callback_global_, NativeToJavaRtcStatsReport(jni, report));
+}
+
+} // namespace jni
+} // namespace webrtc
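
MemberToJava() boxes each defined stats member into the nearest Java type; note that kUint64 goes through java.math.BigInteger because Java has no unsigned 64-bit primitive. A hedged sketch of how the wrapper is driven from native code, assuming a valid PeerConnectionInterface* (the function name is illustrative):

    // Illustrative sketch: requesting stats on behalf of a Java callback.
    // RTCStatsCollectorCallback is ref-counted, so make_ref_counted manages
    // the wrapper's lifetime once GetStats() takes its reference.
    void RequestStats(JNIEnv* env,
                      PeerConnectionInterface* pc,
                      const JavaRef<jobject>& j_callback) {
      auto wrapper = rtc::make_ref_counted<RTCStatsCollectorCallbackWrapper>(
          env, j_callback);
      // OnStatsDelivered() above attaches the current thread and forwards
      // the converted report to Java.
      pc->GetStats(wrapper.get());
    }
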
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
new file mode 100644
index 0000000000..50fad1844d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtc_stats_collector_callback_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
+
+#include <jni.h>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Adapter that presents a Java RTCStatsCollectorCallback as a C++
+// RTCStatsCollectorCallback and dispatches the callback from C++ back to
+// Java.
+class RTCStatsCollectorCallbackWrapper : public RTCStatsCollectorCallback {
+ public:
+ RTCStatsCollectorCallbackWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& j_callback);
+ ~RTCStatsCollectorCallbackWrapper() override;
+
+ void OnStatsDelivered(
+ const rtc::scoped_refptr<const RTCStatsReport>& report) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_callback_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTC_STATS_COLLECTOR_CALLBACK_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc
new file mode 100644
index 0000000000..4bd9ee0e1d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpParameters_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+webrtc::DegradationPreference JavaToNativeDegradationPreference(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_degradation_preference) {
+ std::string enum_name = GetJavaEnumName(jni, j_degradation_preference);
+
+ if (enum_name == "DISABLED")
+ return webrtc::DegradationPreference::DISABLED;
+
+ if (enum_name == "MAINTAIN_FRAMERATE")
+ return webrtc::DegradationPreference::MAINTAIN_FRAMERATE;
+
+ if (enum_name == "MAINTAIN_RESOLUTION")
+ return webrtc::DegradationPreference::MAINTAIN_RESOLUTION;
+
+ if (enum_name == "BALANCED")
+ return webrtc::DegradationPreference::BALANCED;
+
+ RTC_CHECK(false) << "Unexpected DegradationPreference enum_name "
+ << enum_name;
+ return webrtc::DegradationPreference::DISABLED;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpEncodingParameter(
+ JNIEnv* env,
+ const RtpEncodingParameters& encoding) {
+ return Java_Encoding_Constructor(
+ env, NativeToJavaString(env, encoding.rid), encoding.active,
+ encoding.bitrate_priority, static_cast<int>(encoding.network_priority),
+ NativeToJavaInteger(env, encoding.max_bitrate_bps),
+ NativeToJavaInteger(env, encoding.min_bitrate_bps),
+ NativeToJavaInteger(env, encoding.max_framerate),
+ NativeToJavaInteger(env, encoding.num_temporal_layers),
+ NativeToJavaDouble(env, encoding.scale_resolution_down_by),
+ encoding.ssrc ? NativeToJavaLong(env, *encoding.ssrc) : nullptr,
+ encoding.adaptive_ptime);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpCodecParameter(
+ JNIEnv* env,
+ const RtpCodecParameters& codec) {
+ return Java_Codec_Constructor(env, codec.payload_type,
+ NativeToJavaString(env, codec.name),
+ NativeToJavaMediaType(env, codec.kind),
+ NativeToJavaInteger(env, codec.clock_rate),
+ NativeToJavaInteger(env, codec.num_channels),
+ NativeToJavaStringMap(env, codec.parameters));
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpRtcpParameters(
+ JNIEnv* env,
+ const RtcpParameters& rtcp) {
+ return Java_Rtcp_Constructor(env, NativeToJavaString(env, rtcp.cname),
+ rtcp.reduced_size);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpHeaderExtensionParameter(
+ JNIEnv* env,
+ const RtpExtension& extension) {
+ return Java_HeaderExtension_Constructor(
+ env, NativeToJavaString(env, extension.uri), extension.id,
+ extension.encrypt);
+}
+
+} // namespace
+
+RtpEncodingParameters JavaToNativeRtpEncodingParameters(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoding_parameters) {
+ RtpEncodingParameters encoding;
+ ScopedJavaLocalRef<jstring> j_rid =
+ Java_Encoding_getRid(jni, j_encoding_parameters);
+ if (!IsNull(jni, j_rid)) {
+ encoding.rid = JavaToNativeString(jni, j_rid);
+ }
+ encoding.active = Java_Encoding_getActive(jni, j_encoding_parameters);
+ ScopedJavaLocalRef<jobject> j_max_bitrate =
+ Java_Encoding_getMaxBitrateBps(jni, j_encoding_parameters);
+ encoding.bitrate_priority =
+ Java_Encoding_getBitratePriority(jni, j_encoding_parameters);
+ encoding.network_priority = static_cast<webrtc::Priority>(
+ Java_Encoding_getNetworkPriority(jni, j_encoding_parameters));
+ encoding.max_bitrate_bps = JavaToNativeOptionalInt(jni, j_max_bitrate);
+ ScopedJavaLocalRef<jobject> j_min_bitrate =
+ Java_Encoding_getMinBitrateBps(jni, j_encoding_parameters);
+ encoding.min_bitrate_bps = JavaToNativeOptionalInt(jni, j_min_bitrate);
+ ScopedJavaLocalRef<jobject> j_max_framerate =
+ Java_Encoding_getMaxFramerate(jni, j_encoding_parameters);
+ encoding.max_framerate = JavaToNativeOptionalInt(jni, j_max_framerate);
+ ScopedJavaLocalRef<jobject> j_num_temporal_layers =
+ Java_Encoding_getNumTemporalLayers(jni, j_encoding_parameters);
+ encoding.num_temporal_layers =
+ JavaToNativeOptionalInt(jni, j_num_temporal_layers);
+ ScopedJavaLocalRef<jobject> j_scale_resolution_down_by =
+ Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters);
+ encoding.scale_resolution_down_by =
+ JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by);
+ encoding.adaptive_ptime =
+ Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters);
+ ScopedJavaLocalRef<jobject> j_ssrc =
+ Java_Encoding_getSsrc(jni, j_encoding_parameters);
+ if (!IsNull(jni, j_ssrc))
+ encoding.ssrc = JavaToNativeLong(jni, j_ssrc);
+ return encoding;
+}
+
+RtpParameters JavaToNativeRtpParameters(JNIEnv* jni,
+ const JavaRef<jobject>& j_parameters) {
+ RtpParameters parameters;
+
+ ScopedJavaLocalRef<jstring> j_transaction_id =
+ Java_RtpParameters_getTransactionId(jni, j_parameters);
+ parameters.transaction_id = JavaToNativeString(jni, j_transaction_id);
+
+ ScopedJavaLocalRef<jobject> j_degradation_preference =
+ Java_RtpParameters_getDegradationPreference(jni, j_parameters);
+ if (!IsNull(jni, j_degradation_preference)) {
+ parameters.degradation_preference =
+ JavaToNativeDegradationPreference(jni, j_degradation_preference);
+ }
+
+ ScopedJavaLocalRef<jobject> j_rtcp =
+ Java_RtpParameters_getRtcp(jni, j_parameters);
+ ScopedJavaLocalRef<jstring> j_rtcp_cname = Java_Rtcp_getCname(jni, j_rtcp);
+ jboolean j_rtcp_reduced_size = Java_Rtcp_getReducedSize(jni, j_rtcp);
+ parameters.rtcp.cname = JavaToNativeString(jni, j_rtcp_cname);
+ parameters.rtcp.reduced_size = j_rtcp_reduced_size;
+
+ ScopedJavaLocalRef<jobject> j_header_extensions =
+ Java_RtpParameters_getHeaderExtensions(jni, j_parameters);
+ for (const JavaRef<jobject>& j_header_extension :
+ Iterable(jni, j_header_extensions)) {
+ RtpExtension header_extension;
+ header_extension.uri = JavaToStdString(
+ jni, Java_HeaderExtension_getUri(jni, j_header_extension));
+ header_extension.id = Java_HeaderExtension_getId(jni, j_header_extension);
+ header_extension.encrypt =
+ Java_HeaderExtension_getEncrypted(jni, j_header_extension);
+ parameters.header_extensions.push_back(header_extension);
+ }
+
+ // Convert encodings.
+ ScopedJavaLocalRef<jobject> j_encodings =
+ Java_RtpParameters_getEncodings(jni, j_parameters);
+ for (const JavaRef<jobject>& j_encoding_parameters :
+ Iterable(jni, j_encodings)) {
+ RtpEncodingParameters encoding =
+ JavaToNativeRtpEncodingParameters(jni, j_encoding_parameters);
+ parameters.encodings.push_back(encoding);
+ }
+
+ // Convert codecs.
+ ScopedJavaLocalRef<jobject> j_codecs =
+ Java_RtpParameters_getCodecs(jni, j_parameters);
+ for (const JavaRef<jobject>& j_codec : Iterable(jni, j_codecs)) {
+ RtpCodecParameters codec;
+ codec.payload_type = Java_Codec_getPayloadType(jni, j_codec);
+ codec.name = JavaToStdString(jni, Java_Codec_getName(jni, j_codec));
+ codec.kind = JavaToNativeMediaType(jni, Java_Codec_getKind(jni, j_codec));
+ codec.clock_rate =
+ JavaToNativeOptionalInt(jni, Java_Codec_getClockRate(jni, j_codec));
+ codec.num_channels =
+ JavaToNativeOptionalInt(jni, Java_Codec_getNumChannels(jni, j_codec));
+ auto parameters_map =
+ JavaToNativeStringMap(jni, Java_Codec_getParameters(jni, j_codec));
+ codec.parameters.insert(parameters_map.begin(), parameters_map.end());
+ parameters.codecs.push_back(codec);
+ }
+ return parameters;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters(
+ JNIEnv* env,
+ const RtpParameters& parameters) {
+ return Java_RtpParameters_Constructor(
+ env, NativeToJavaString(env, parameters.transaction_id),
+ parameters.degradation_preference.has_value()
+ ? Java_DegradationPreference_fromNativeIndex(
+ env, static_cast<int>(*parameters.degradation_preference))
+ : nullptr,
+ NativeToJavaRtpRtcpParameters(env, parameters.rtcp),
+ NativeToJavaList(env, parameters.header_extensions,
+ &NativeToJavaRtpHeaderExtensionParameter),
+ NativeToJavaList(env, parameters.encodings,
+ &NativeToJavaRtpEncodingParameter),
+ NativeToJavaList(env, parameters.codecs, &NativeToJavaRtpCodecParameter));
+}
+
+} // namespace jni
+} // namespace webrtc
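
Nullable boxed fields on the Java side (Integer, Double, Long) map to absl::optional<> through JavaToNativeOptionalInt/JavaToNativeOptionalDouble, so an unset value survives the round trip as absl::nullopt. A sketch of the typical read-modify-write these converters enable (names are illustrative; `sender` is assumed valid):

    // Illustrative sketch: cap every encoding's bitrate via the converters.
    bool CapBitrate(JNIEnv* env,
                    RtpSenderInterface* sender,
                    const JavaRef<jobject>& j_params) {
      RtpParameters params = JavaToNativeRtpParameters(env, j_params);
      for (RtpEncodingParameters& encoding : params.encodings) {
        // Unset optionals stay absl::nullopt across the Java round trip.
        encoding.max_bitrate_bps = 500000;  // 500 kbps.
      }
      return sender->SetParameters(params).ok();
    }
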
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h
new file mode 100644
index 0000000000..3bcd343fae
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_parameters.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
+
+#include <jni.h>
+
+#include "api/rtp_parameters.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+RtpEncodingParameters JavaToNativeRtpEncodingParameters(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_encoding_parameters);
+
+RtpParameters JavaToNativeRtpParameters(JNIEnv* jni,
+ const JavaRef<jobject>& j_parameters);
+ScopedJavaLocalRef<jobject> NativeToJavaRtpParameters(
+ JNIEnv* jni,
+ const RtpParameters& parameters);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_PARAMETERS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc
new file mode 100644
index 0000000000..7a3600b424
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpReceiver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+// Adapter between the C++ RtpReceiverObserverInterface and the Java
+// RtpReceiver.Observer interface. Wraps an instance of the Java interface and
+// dispatches C++ callbacks to Java.
+class RtpReceiverObserverJni : public RtpReceiverObserverInterface {
+ public:
+ RtpReceiverObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+ ~RtpReceiverObserverJni() override = default;
+
+ void OnFirstPacketReceived(cricket::MediaType media_type) override {
+ JNIEnv* const env = AttachCurrentThreadIfNeeded();
+ Java_Observer_onFirstPacketReceived(env, j_observer_global_,
+ NativeToJavaMediaType(env, media_type));
+ }
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpReceiverInterface> receiver) {
+ // The receiver is now owned by the Java object and will be freed from there.
+ return Java_RtpReceiver_Constructor(env,
+ jlongFromPointer(receiver.release()));
+}
+
+JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_receiver)
+ : j_receiver_(env, j_receiver) {}
+
+JavaRtpReceiverGlobalOwner::JavaRtpReceiverGlobalOwner(
+ JavaRtpReceiverGlobalOwner&& other) = default;
+
+JavaRtpReceiverGlobalOwner::~JavaRtpReceiverGlobalOwner() {
+ if (j_receiver_.obj())
+ Java_RtpReceiver_dispose(AttachCurrentThreadIfNeeded(), j_receiver_);
+}
+
+static jlong JNI_RtpReceiver_GetTrack(JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+ // Ownership of the MediaStreamTrack is shared with the Java
+ // MediaStreamTrack object.
+ return jlongFromPointer(
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->track()
+ .release());
+}
+
+static ScopedJavaLocalRef<jobject> JNI_RtpReceiver_GetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+ RtpParameters parameters =
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->GetParameters();
+ return NativeToJavaRtpParameters(jni, parameters);
+}
+
+static ScopedJavaLocalRef<jstring> JNI_RtpReceiver_GetId(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer) {
+ return NativeToJavaString(
+ jni,
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+static jlong JNI_RtpReceiver_SetObserver(
+ JNIEnv* jni,
+ jlong j_rtp_receiver_pointer,
+ const JavaParamRef<jobject>& j_observer) {
+ RtpReceiverObserverJni* rtpReceiverObserver =
+ new RtpReceiverObserverJni(jni, j_observer);
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetObserver(rtpReceiverObserver);
+ return jlongFromPointer(rtpReceiverObserver);
+}
+
+static void JNI_RtpReceiver_UnsetObserver(JNIEnv* jni,
+ jlong j_rtp_receiver_pointer,
+ jlong j_observer_pointer) {
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetObserver(nullptr);
+ RtpReceiverObserverJni* observer =
+ reinterpret_cast<RtpReceiverObserverJni*>(j_observer_pointer);
+ if (observer) {
+ delete observer;
+ }
+}
+
+static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ jlong j_frame_decryptor_pointer) {
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_sender_pointer)
+ ->SetFrameDecryptor(rtc::scoped_refptr<FrameDecryptorInterface>(
+ reinterpret_cast<FrameDecryptorInterface*>(
+ j_frame_decryptor_pointer)));
+}
+
+} // namespace jni
+} // namespace webrtc
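
These bindings follow one ownership convention throughout: a native object crosses into Java as a jlong holding a raw pointer (jlongFromPointer(x.release()) transfers the reference), and every JNI entry point recovers it with reinterpret_cast; the Java object's dispose() eventually drops the reference. In sketch form, using the receiver type from this file (helper names are illustrative):

    // Illustrative sketch of the jlong <-> pointer hand-off.
    jlong PassReceiverToJava(rtc::scoped_refptr<RtpReceiverInterface> receiver) {
      // release() transfers the reference; RtpReceiver.dispose() on the
      // Java side is responsible for giving it back.
      return jlongFromPointer(receiver.release());
    }

    RtpReceiverInterface* ReceiverFromJlong(jlong j_rtp_receiver_pointer) {
      // Every JNI entry point above recovers the native object this way.
      return reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer);
    }
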
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
new file mode 100644
index 0000000000..ccef44b040
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_receiver.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_receiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpReceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpReceiverInterface> receiver);
+
+// Takes ownership of the passed `j_receiver` and stores it as a global
+// reference. Will call dispose() in the dtor.
+class JavaRtpReceiverGlobalOwner {
+ public:
+ JavaRtpReceiverGlobalOwner(JNIEnv* env, const JavaRef<jobject>& j_receiver);
+ JavaRtpReceiverGlobalOwner(JavaRtpReceiverGlobalOwner&& other);
+ ~JavaRtpReceiverGlobalOwner();
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_receiver_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_RECEIVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc
new file mode 100644
index 0000000000..233a353654
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+
+#include "sdk/android/generated_peerconnection_jni/RtpSender_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpSenderInterface> sender) {
+ if (!sender)
+ return nullptr;
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ return Java_RtpSender_Constructor(env, jlongFromPointer(sender.release()));
+}
+
+static jboolean JNI_RtpSender_SetTrack(JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+jlong JNI_RtpSender_GetTrack(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ // Ownership of the MediaStreamTrack is shared with the Java
+ // MediaStreamTrack object.
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->track()
+ .release());
+}
+
+static void JNI_RtpSender_SetStreams(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ const JavaParamRef<jobject>& j_stream_labels) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetStreams(JavaListToNativeVector<std::string, jstring>(
+ jni, j_stream_labels, &JavaToNativeString));
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpSender_GetStreams(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ ScopedJavaLocalRef<jstring> (*convert_function)(JNIEnv*, const std::string&) =
+ &NativeToJavaString;
+ return NativeToJavaList(
+ jni,
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->stream_ids(),
+ convert_function);
+}
+
+jlong JNI_RtpSender_GetDtmfSender(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->GetDtmfSender()
+ .release());
+}
+
+jboolean JNI_RtpSender_SetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ const JavaParamRef<jobject>& j_parameters) {
+ if (IsNull(jni, j_parameters)) {
+ return false;
+ }
+ RtpParameters parameters = JavaToNativeRtpParameters(jni, j_parameters);
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetParameters(parameters)
+ .ok();
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpSender_GetParameters(
+ JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ RtpParameters parameters =
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->GetParameters();
+ return NativeToJavaRtpParameters(jni, parameters);
+}
+
+ScopedJavaLocalRef<jstring> JNI_RtpSender_GetId(JNIEnv* jni,
+ jlong j_rtp_sender_pointer) {
+ return NativeToJavaString(
+ jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
+static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni,
+ jlong j_rtp_sender_pointer,
+ jlong j_frame_encryptor_pointer) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetFrameEncryptor(rtc::scoped_refptr<FrameEncryptorInterface>(
+ reinterpret_cast<FrameEncryptorInterface*>(
+ j_frame_encryptor_pointer)));
+}
+
+} // namespace jni
+} // namespace webrtc
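
SetStreams() and GetStreams() show the generic list-conversion pattern: JavaListToNativeVector turns a Java List<String> into std::vector<std::string> with a per-element converter, and NativeToJavaList goes the other way (the explicit function-pointer type picks the right NativeToJavaString overload). Extracted as a standalone sketch (helper names are illustrative):

    // Illustrative sketch of the List<String> <-> std::vector pattern.
    std::vector<std::string> StringsToNative(JNIEnv* env,
                                             const JavaRef<jobject>& j_list) {
      return JavaListToNativeVector<std::string, jstring>(
          env, j_list, &JavaToNativeString);
    }

    ScopedJavaLocalRef<jobject> StringsToJava(
        JNIEnv* env, const std::vector<std::string>& ids) {
      // Spell out the overload, as JNI_RtpSender_GetStreams() does above.
      ScopedJavaLocalRef<jstring> (*convert)(JNIEnv*, const std::string&) =
          &NativeToJavaString;
      return NativeToJavaList(env, ids, convert);
    }
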
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h
new file mode 100644
index 0000000000..d782ca915f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_sender.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
+
+#include <jni.h>
+
+#include "api/rtp_sender_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpSender(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpSenderInterface> sender);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_SENDER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc
new file mode 100644
index 0000000000..1d468461f1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.cc
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/rtp_transceiver.h"
+
+#include <string>
+
+#include "sdk/android/generated_peerconnection_jni/RtpTransceiver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/media_stream_track.h"
+#include "sdk/android/src/jni/pc/rtp_parameters.h"
+#include "sdk/android/src/jni/pc/rtp_receiver.h"
+#include "sdk/android/src/jni/pc/rtp_sender.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiverDirection(
+ JNIEnv* jni,
+ RtpTransceiverDirection rtp_transceiver_direction) {
+ return Java_RtpTransceiverDirection_fromNativeIndex(
+ jni, static_cast<int>(rtp_transceiver_direction));
+}
+
+} // namespace
+
+RtpTransceiverInit JavaToNativeRtpTransceiverInit(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_init) {
+ RtpTransceiverInit init;
+
+ // Convert the direction.
+ init.direction = static_cast<RtpTransceiverDirection>(
+ Java_RtpTransceiverInit_getDirectionNativeIndex(jni, j_init));
+
+ // Convert the stream ids.
+ ScopedJavaLocalRef<jobject> j_stream_ids =
+ Java_RtpTransceiverInit_getStreamIds(jni, j_init);
+ init.stream_ids = JavaListToNativeVector<std::string, jstring>(
+ jni, j_stream_ids, &JavaToNativeString);
+
+ // Convert the send encodings.
+ ScopedJavaLocalRef<jobject> j_send_encodings =
+ Java_RtpTransceiverInit_getSendEncodings(jni, j_init);
+ init.send_encodings = JavaListToNativeVector<RtpEncodingParameters, jobject>(
+ jni, j_send_encodings, &JavaToNativeRtpEncodingParameters);
+ return init;
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver) {
+ if (!transceiver) {
+ return nullptr;
+ }
+ // Ownership of the transceiver is now shared with the Java object.
+ return Java_RtpTransceiver_Constructor(
+ env, jlongFromPointer(transceiver.release()));
+}
+
+JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_transceiver)
+ : j_transceiver_(env, j_transceiver) {}
+
+JavaRtpTransceiverGlobalOwner::JavaRtpTransceiverGlobalOwner(
+ JavaRtpTransceiverGlobalOwner&& other) = default;
+
+JavaRtpTransceiverGlobalOwner::~JavaRtpTransceiverGlobalOwner() {
+ if (j_transceiver_.obj()) {
+ Java_RtpTransceiver_dispose(AttachCurrentThreadIfNeeded(), j_transceiver_);
+ }
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetMediaType(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaMediaType(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->media_type());
+}
+
+ScopedJavaLocalRef<jstring> JNI_RtpTransceiver_GetMid(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ absl::optional<std::string> mid =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->mid();
+ return NativeToJavaString(jni, mid);
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetSender(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpSender(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->sender());
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_GetReceiver(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpReceiver(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->receiver());
+}
+
+jboolean JNI_RtpTransceiver_Stopped(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->stopped();
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_Direction(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ return NativeToJavaRtpTransceiverDirection(
+ jni, reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->direction());
+}
+
+ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_CurrentDirection(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ absl::optional<RtpTransceiverDirection> direction =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->current_direction();
+ return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)
+ : nullptr;
+}
+
+void JNI_RtpTransceiver_StopInternal(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->StopInternal();
+}
+
+void JNI_RtpTransceiver_StopStandard(JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer) {
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->StopStandard();
+}
+
+jboolean JNI_RtpTransceiver_SetDirection(
+ JNIEnv* jni,
+ jlong j_rtp_transceiver_pointer,
+ const base::android::JavaParamRef<jobject>& j_rtp_transceiver_direction) {
+ if (IsNull(jni, j_rtp_transceiver_direction)) {
+ return false;
+ }
+ RtpTransceiverDirection direction = static_cast<RtpTransceiverDirection>(
+ Java_RtpTransceiverDirection_getNativeIndex(jni,
+ j_rtp_transceiver_direction));
+ webrtc::RTCError error =
+ reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
+ ->SetDirectionWithError(direction);
+ if (!error.ok()) {
+ RTC_LOG(LS_WARNING) << "SetDirection failed, code "
+ << ToString(error.type()) << ", message "
+ << error.message();
+ }
+ return error.ok();
+}
+
+} // namespace jni
+} // namespace webrtc
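
JavaToNativeRtpTransceiverInit() unpacks the direction, stream ids, and send encodings in one pass, which is what PeerConnection.addTransceiver() ultimately needs. A hedged sketch of that consumer, assuming a valid PeerConnectionInterface* (the helper name is illustrative):

    // Illustrative sketch: converting a Java RtpTransceiver.Init and adding
    // an audio transceiver with it.
    ScopedJavaLocalRef<jobject> AddAudioTransceiver(
        JNIEnv* env,
        PeerConnectionInterface* pc,
        const JavaRef<jobject>& j_init) {
      RtpTransceiverInit init = JavaToNativeRtpTransceiverInit(env, j_init);
      auto result = pc->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
      if (!result.ok())
        return nullptr;
      // Shared ownership passes to the Java RtpTransceiver object.
      return NativeToJavaRtpTransceiver(env, result.MoveValue());
    }
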
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
new file mode 100644
index 0000000000..5b2d0121ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/rtp_transceiver.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
+
+#include <jni.h>
+
+#include "api/rtp_transceiver_interface.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+RtpTransceiverInit JavaToNativeRtpTransceiverInit(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_init);
+
+ScopedJavaLocalRef<jobject> NativeToJavaRtpTransceiver(
+ JNIEnv* env,
+ rtc::scoped_refptr<RtpTransceiverInterface> transceiver);
+
+// This takes ownership of the `j_transceiver` and stores it as a global
+// reference. It calls the Java RtpTransceiver's dispose() method in the dtor.
+class JavaRtpTransceiverGlobalOwner {
+ public:
+ JavaRtpTransceiverGlobalOwner(JNIEnv* env,
+ const JavaRef<jobject>& j_transceiver);
+ JavaRtpTransceiverGlobalOwner(JavaRtpTransceiverGlobalOwner&& other);
+ ~JavaRtpTransceiverGlobalOwner();
+
+ private:
+ ScopedJavaGlobalRef<jobject> j_transceiver_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_RTP_TRANSCEIVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
new file mode 100644
index 0000000000..c8b4345af4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/sdp_observer.h"
+
+#include <utility>
+
+#include "sdk/android/generated_peerconnection_jni/SdpObserver_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+CreateSdpObserverJni::CreateSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer,
+ std::unique_ptr<MediaConstraints> constraints)
+ : j_observer_global_(env, j_observer),
+ constraints_(std::move(constraints)) {}
+
+CreateSdpObserverJni::~CreateSdpObserverJni() = default;
+
+void CreateSdpObserverJni::OnSuccess(SessionDescriptionInterface* desc) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ std::string sdp;
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ Java_SdpObserver_onCreateSuccess(
+ env, j_observer_global_,
+ NativeToJavaSessionDescription(env, sdp, desc->type()));
+ // OnSuccess transfers ownership of the description (there's a TODO to make
+ // it use unique_ptr...).
+ delete desc;
+}
+
+void CreateSdpObserverJni::OnFailure(webrtc::RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ Java_SdpObserver_onCreateFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+}
+
+SetLocalSdpObserverJni::SetLocalSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void SetLocalSdpObserverJni::OnSetLocalDescriptionComplete(RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_SdpObserver_onSetSuccess(env, j_observer_global_);
+ } else {
+ Java_SdpObserver_onSetFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+SetRemoteSdpObserverJni::SetRemoteSdpObserverJni(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(env, j_observer) {}
+
+void SetRemoteSdpObserverJni::OnSetRemoteDescriptionComplete(RTCError error) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ if (error.ok()) {
+ Java_SdpObserver_onSetSuccess(env, j_observer_global_);
+ } else {
+ Java_SdpObserver_onSetFailure(env, j_observer_global_,
+ NativeToJavaString(env, error.message()));
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
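
CreateSdpObserverJni derives from the ref-counted CreateSessionDescriptionObserver, so callers create it with rtc::make_ref_counted and hand it straight to CreateOffer()/CreateAnswer(). A sketch under those assumptions (`pc` valid, constraints omitted; the function name is illustrative):

    // Illustrative sketch: driving CreateOffer() with a Java SdpObserver.
    void CreateOfferWithJavaObserver(JNIEnv* env,
                                     PeerConnectionInterface* pc,
                                     const JavaRef<jobject>& j_observer) {
      auto observer = rtc::make_ref_counted<CreateSdpObserverJni>(
          env, j_observer, /*constraints=*/nullptr);
      // OnSuccess()/OnFailure() above forward the result back into Java.
      pc->CreateOffer(observer.get(),
                      PeerConnectionInterface::RTCOfferAnswerOptions());
    }
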
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h
new file mode 100644
index 0000000000..b33a3018c8
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/sdp_observer.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
+
+#include <memory>
+#include <string>
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/pc/session_description.h"
+#include "sdk/media_constraints.h"
+
+namespace webrtc {
+namespace jni {
+
+class CreateSdpObserverJni : public CreateSessionDescriptionObserver {
+ public:
+ CreateSdpObserverJni(JNIEnv* env,
+ const JavaRef<jobject>& j_observer,
+ std::unique_ptr<MediaConstraints> constraints);
+ ~CreateSdpObserverJni() override;
+
+ MediaConstraints* constraints() { return constraints_.get(); }
+
+ void OnSuccess(SessionDescriptionInterface* desc) override;
+ void OnFailure(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+ std::unique_ptr<MediaConstraints> constraints_;
+};
+
+class SetLocalSdpObserverJni : public SetLocalDescriptionObserverInterface {
+ public:
+ SetLocalSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+
+ ~SetLocalSdpObserverJni() override = default;
+
+ virtual void OnSetLocalDescriptionComplete(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+class SetRemoteSdpObserverJni : public SetRemoteDescriptionObserverInterface {
+ public:
+ SetRemoteSdpObserverJni(JNIEnv* env, const JavaRef<jobject>& j_observer);
+
+ ~SetRemoteSdpObserverJni() override = default;
+
+ virtual void OnSetRemoteDescriptionComplete(RTCError error) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SDP_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc
new file mode 100644
index 0000000000..bbac721e51
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.cc
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/session_description.h"
+
+#include <string>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_peerconnection_jni/SessionDescription_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp) {
+ std::string std_type = JavaToStdString(
+ jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
+ std::string std_description =
+ JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
+ absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
+ if (!sdp_type_maybe) {
+ RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
+ return nullptr;
+ }
+ return CreateSessionDescription(*sdp_type_maybe, std_description);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription(
+ JNIEnv* jni,
+ const std::string& sdp,
+ const std::string& type) {
+ return Java_SessionDescription_Constructor(
+ jni, Java_Type_fromCanonicalForm(jni, NativeToJavaString(jni, type)),
+ NativeToJavaString(jni, sdp));
+}
+
+} // namespace jni
+} // namespace webrtc
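
JavaToNativeSessionDescription() returns nullptr for an unrecognized type string, so consumers must check before use. A guarded round trip in sketch form (the helper name is illustrative):

    // Illustrative sketch: Java -> native SDP, then back to Java.
    ScopedJavaLocalRef<jobject> ReserializeSdp(JNIEnv* env,
                                               const JavaRef<jobject>& j_sdp) {
      std::unique_ptr<SessionDescriptionInterface> desc =
          JavaToNativeSessionDescription(env, j_sdp);
      if (!desc)  // Unknown type; the error was already logged above.
        return nullptr;
      std::string sdp;
      if (!desc->ToString(&sdp))
        return nullptr;
      return NativeToJavaSessionDescription(
          env, sdp, SdpTypeToString(desc->GetType()));
    }
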
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h
new file mode 100644
index 0000000000..f0f49cb2ee
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/session_description.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
+#define SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
+
+#include <jni.h>
+#include <memory>
+#include <string>
+
+#include "api/jsep.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_sdp);
+
+ScopedJavaLocalRef<jobject> NativeToJavaSessionDescription(
+ JNIEnv* jni,
+ const std::string& sdp,
+ const std::string& type);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SESSION_DESCRIPTION_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
new file mode 100644
index 0000000000..74ef3b8049
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h"
+#include "sdk/android/generated_peerconnection_jni/SSLCertificateVerifier_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+namespace webrtc {
+namespace jni {
+
+SSLCertificateVerifierWrapper::SSLCertificateVerifierWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& ssl_certificate_verifier)
+ : ssl_certificate_verifier_(jni, ssl_certificate_verifier) {}
+
+SSLCertificateVerifierWrapper::~SSLCertificateVerifierWrapper() = default;
+
+bool SSLCertificateVerifierWrapper::Verify(
+ const rtc::SSLCertificate& certificate) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ // Serialize the DER encoding of the certificate into a jbyteArray.
+ rtc::Buffer cert_der_buffer;
+ certificate.ToDER(&cert_der_buffer);
+ ScopedJavaLocalRef<jbyteArray> jni_buffer(
+ jni, jni->NewByteArray(cert_der_buffer.size()));
+ jni->SetByteArrayRegion(
+ jni_buffer.obj(), 0, cert_der_buffer.size(),
+ reinterpret_cast<const jbyte*>(cert_der_buffer.data()));
+
+ return Java_SSLCertificateVerifier_verify(jni, ssl_certificate_verifier_,
+ jni_buffer);
+}
+
+} // namespace jni
+} // namespace webrtc
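
The wrapper plugs into the peer connection through PeerConnectionDependencies::tls_cert_verifier, which takes a std::unique_ptr<rtc::SSLCertificateVerifier>. A one-line sketch of that hook-up (assumes <memory> is available and a dependencies struct is being assembled elsewhere; the function name is illustrative):

    // Illustrative sketch: installing the Java-backed verifier.
    void InstallVerifier(JNIEnv* env,
                         const JavaRef<jobject>& j_verifier,
                         PeerConnectionDependencies& deps) {
      deps.tls_cert_verifier =
          std::make_unique<SSLCertificateVerifierWrapper>(env, j_verifier);
    }
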
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
new file mode 100644
index 0000000000..8c883f445b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "rtc_base/ssl_certificate.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for the Java SSLCertificateVerifier class. Delegates method calls
+// through JNI and wraps the Java verifier inside SSLCertificateVerifierWrapper.
+class SSLCertificateVerifierWrapper : public rtc::SSLCertificateVerifier {
+ public:
+ SSLCertificateVerifierWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& ssl_certificate_verifier);
+ ~SSLCertificateVerifierWrapper() override;
+
+ bool Verify(const rtc::SSLCertificate& certificate) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> ssl_certificate_verifier_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_SSL_CERTIFICATE_VERIFIER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
new file mode 100644
index 0000000000..6d4a31df1c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/stats_observer.h"
+
+#include <vector>
+
+#include "sdk/android/generated_peerconnection_jni/StatsObserver_jni.h"
+#include "sdk/android/generated_peerconnection_jni/StatsReport_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+ScopedJavaLocalRef<jobject> NativeToJavaStatsReportValue(
+ JNIEnv* env,
+ const rtc::scoped_refptr<StatsReport::Value>& value_ptr) {
+ // Should we use the '.name' enum value here instead of converting the
+ // name to a string?
+ return Java_Value_Constructor(
+ env, NativeToJavaString(env, value_ptr->display_name()),
+ NativeToJavaString(env, value_ptr->ToString()));
+}
+
+ScopedJavaLocalRef<jobjectArray> NativeToJavaStatsReportValueArray(
+ JNIEnv* env,
+ const StatsReport::Values& value_map) {
+ // Ignore the keys and make an array out of the values.
+ std::vector<StatsReport::ValuePtr> values;
+ for (const auto& it : value_map)
+ values.push_back(it.second);
+ return NativeToJavaObjectArray(env, values,
+ org_webrtc_StatsReport_00024Value_clazz(env),
+ &NativeToJavaStatsReportValue);
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaStatsReport(JNIEnv* env,
+ const StatsReport& report) {
+ return Java_StatsReport_Constructor(
+ env, NativeToJavaString(env, report.id()->ToString()),
+ NativeToJavaString(env, report.TypeToString()), report.timestamp(),
+ NativeToJavaStatsReportValueArray(env, report.values()));
+}
+
+} // namespace
+
+StatsObserverJni::StatsObserverJni(JNIEnv* jni,
+ const JavaRef<jobject>& j_observer)
+ : j_observer_global_(jni, j_observer) {}
+
+StatsObserverJni::~StatsObserverJni() = default;
+
+void StatsObserverJni::OnComplete(const StatsReports& reports) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobjectArray> j_reports =
+ NativeToJavaObjectArray(env, reports, org_webrtc_StatsReport_clazz(env),
+ [](JNIEnv* env, const StatsReport* report) {
+ return NativeToJavaStatsReport(env, *report);
+ });
+ Java_StatsObserver_onComplete(env, j_observer_global_, j_reports);
+}
+
+} // namespace jni
+} // namespace webrtc
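
StatsObserverJni serves the legacy (pre-standard) stats API, which is driven through the three-argument GetStats() overload. A sketch, assuming a valid PeerConnectionInterface* (the function name is illustrative):

    // Illustrative sketch: requesting legacy stats for a Java observer.
    void RequestLegacyStats(JNIEnv* env,
                            PeerConnectionInterface* pc,
                            const JavaRef<jobject>& j_observer) {
      auto observer = rtc::make_ref_counted<StatsObserverJni>(env, j_observer);
      pc->GetStats(observer.get(), /*track=*/nullptr,
                   PeerConnectionInterface::kStatsOutputLevelStandard);
    }
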
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h
new file mode 100644
index 0000000000..0cfd43384b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/stats_observer.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
+#define SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
+
+#include "api/peer_connection_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Adapter that presents a Java StatsObserver as a C++ StatsObserver and
+// dispatches the callback from C++ back to Java.
+class StatsObserverJni : public StatsObserver {
+ public:
+ StatsObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+ ~StatsObserverJni() override;
+
+ void OnComplete(const StatsReports& reports) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> j_observer_global_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_STATS_OBSERVER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc
new file mode 100644
index 0000000000..5c93fcd7c0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.cc
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/turn_customizer.h"
+#include "sdk/android/generated_peerconnection_jni/TurnCustomizer_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+TurnCustomizer* GetNativeTurnCustomizer(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_turn_customizer) {
+ if (IsNull(env, j_turn_customizer))
+ return nullptr;
+ return reinterpret_cast<webrtc::TurnCustomizer*>(
+ Java_TurnCustomizer_getNativeTurnCustomizer(env, j_turn_customizer));
+}
+
+static void JNI_TurnCustomizer_FreeTurnCustomizer(
+ JNIEnv* jni,
+ jlong j_turn_customizer_pointer) {
+ delete reinterpret_cast<TurnCustomizer*>(j_turn_customizer_pointer);
+}
+
+} // namespace jni
+} // namespace webrtc
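
The Java TurnCustomizer merely carries a native pointer: GetNativeTurnCustomizer() unwraps it (or returns nullptr) so it can be stored in RTCConfiguration::turn_customizer, and FreeTurnCustomizer deletes it when the Java object is disposed. In sketch form (the function name is illustrative):

    // Illustrative sketch: applying the unwrapped customizer. The raw
    // pointer remains owned by the Java object, which frees it through
    // JNI_TurnCustomizer_FreeTurnCustomizer() above.
    void ApplyTurnCustomizer(
        JNIEnv* env,
        const JavaRef<jobject>& j_turn_customizer,
        PeerConnectionInterface::RTCConfiguration* config) {
      config->turn_customizer = GetNativeTurnCustomizer(env, j_turn_customizer);
    }
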
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h
new file mode 100644
index 0000000000..359234fc76
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/turn_customizer.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
+#define SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
+
+#include "api/turn_customizer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+TurnCustomizer* GetNativeTurnCustomizer(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_turn_customizer);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_TURN_CUSTOMIZER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc
new file mode 100644
index 0000000000..b955dbb1ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.cc
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/pc/video.h"
+
+#include <jni.h>
+
+#include <memory>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+#include "sdk/android/src/jni/video_decoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderFactory* CreateVideoEncoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder_factory) {
+ return IsNull(jni, j_encoder_factory)
+ ? nullptr
+ : new VideoEncoderFactoryWrapper(jni, j_encoder_factory);
+}
+
+VideoDecoderFactory* CreateVideoDecoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder_factory) {
+ return IsNull(jni, j_decoder_factory)
+ ? nullptr
+ : new VideoDecoderFactoryWrapper(jni, j_decoder_factory);
+}
+
+void* CreateVideoSource(JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ jboolean is_screencast,
+ jboolean align_timestamps) {
+ auto source = rtc::make_ref_counted<AndroidVideoTrackSource>(
+ signaling_thread, env, is_screencast, align_timestamps);
+ return source.release();
+}
+
+} // namespace jni
+} // namespace webrtc
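
Note: CreateVideoSource() above hands exactly one strong reference across the JNI boundary: release() detaches the scoped_refptr without touching the refcount, so the returned void* carries that reference until the Java side disposes it. A standalone sketch of the hand-off under simplified assumptions (all names hypothetical, no thread safety):

    #include <cassert>

    class RefCountedSource {
     public:
      void AddRef() { ++count_; }
      int Release() { return --count_; }
      int count() const { return count_; }

     private:
      int count_ = 1;  // Starts owned by the creator.
    };

    int main() {
      RefCountedSource* raw = new RefCountedSource();  // refcount == 1
      // Equivalent of source.release(): the pointer and its single
      // reference travel to the managed side as an opaque handle.
      assert(raw->count() == 1);
      // Equivalent of the Java side's eventual dispose():
      if (raw->Release() == 0)
        delete raw;
      return 0;
    }
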
diff --git a/third_party/libwebrtc/sdk/android/src/jni/pc/video.h b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h
new file mode 100644
index 0000000000..32bc6406a1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/pc/video.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
+#define SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
+
+#include <jni.h>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+class VideoEncoderFactory;
+class VideoDecoderFactory;
+} // namespace webrtc
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderFactory* CreateVideoEncoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder_factory);
+
+VideoDecoderFactory* CreateVideoDecoderFactory(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder_factory);
+
+void* CreateVideoSource(JNIEnv* env,
+ rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ jboolean is_screencast,
+ jboolean align_timestamps);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_PC_VIDEO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc
new file mode 100644
index 0000000000..1df8c7ade5
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/scoped_java_ref_counted.h"
+
+#include "sdk/android/generated_base_jni/RefCounted_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+// static
+ScopedJavaRefCounted ScopedJavaRefCounted::Retain(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_object) {
+ Java_RefCounted_retain(jni, j_object);
+ CHECK_EXCEPTION(jni)
+      << "Unexpected Java exception from RefCounted.retain()";
+ return Adopt(jni, j_object);
+}
+
+ScopedJavaRefCounted::~ScopedJavaRefCounted() {
+ if (!j_object_.is_null()) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_RefCounted_release(jni, j_object_);
+ CHECK_EXCEPTION(jni)
+        << "Unexpected Java exception from RefCounted.release()";
+ }
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
new file mode 100644
index 0000000000..3ea226259e
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/scoped_java_ref_counted.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+#define SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+// Holds a reference to a Java object implementing the RefCounted interface,
+// and calls its release() method from the destructor.
+class ScopedJavaRefCounted {
+ public:
+ // Takes over the caller's reference.
+ static ScopedJavaRefCounted Adopt(JNIEnv* jni,
+ const JavaRef<jobject>& j_object) {
+ return ScopedJavaRefCounted(jni, j_object);
+ }
+
+  // Retains the Java object for the lifetime of this object.
+ static ScopedJavaRefCounted Retain(JNIEnv* jni,
+ const JavaRef<jobject>& j_object);
+ ScopedJavaRefCounted(ScopedJavaRefCounted&& other) = default;
+
+ ScopedJavaRefCounted(const ScopedJavaRefCounted& other) = delete;
+ ScopedJavaRefCounted& operator=(const ScopedJavaRefCounted&) = delete;
+
+ ~ScopedJavaRefCounted();
+
+ private:
+ // Adopts reference.
+ ScopedJavaRefCounted(JNIEnv* jni, const JavaRef<jobject>& j_object)
+ : j_object_(jni, j_object) {}
+
+ ScopedJavaGlobalRef<jobject> j_object_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_SCOPED_JAVA_REF_COUNTED_H_
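
Note: the class above gives native code RAII over a Java-side reference count. The two factories differ in who supplies the reference: Retain() increments the Java count and is for callers that keep their own reference, while Adopt() takes over a reference the caller already owns. A hedged in-tree usage sketch (ProcessBuffer and j_buffer are hypothetical):

    #include <jni.h>

    #include "sdk/android/native_api/jni/scoped_java_ref.h"
    #include "sdk/android/src/jni/scoped_java_ref_counted.h"

    namespace webrtc {
    namespace jni {

    void ProcessBuffer(JNIEnv* env, const JavaRef<jobject>& j_buffer) {
      // The caller keeps its own Java-side reference, so take one more;
      // RefCounted.release() runs automatically in the destructor.
      ScopedJavaRefCounted local = ScopedJavaRefCounted::Retain(env, j_buffer);
      // Adopt() would instead take over a reference the caller already
      // owns, without calling retain() again.
    }

    }  // namespace jni
    }  // namespace webrtc
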
diff --git a/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
new file mode 100644
index 0000000000..c0c5fd9d9f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/timestamp_aligner.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "rtc_base/time_utils.h"
+#include "rtc_base/timestamp_aligner.h"
+#include "sdk/android/generated_video_jni/TimestampAligner_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_TimestampAligner_RtcTimeNanos(JNIEnv* env) {
+ return rtc::TimeNanos();
+}
+
+static jlong JNI_TimestampAligner_CreateTimestampAligner(JNIEnv* env) {
+ return jlongFromPointer(new rtc::TimestampAligner());
+}
+
+static void JNI_TimestampAligner_ReleaseTimestampAligner(
+ JNIEnv* env,
+ jlong timestamp_aligner) {
+ delete reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner);
+}
+
+static jlong JNI_TimestampAligner_TranslateTimestamp(
+ JNIEnv* env,
+ jlong timestamp_aligner,
+ jlong camera_time_ns) {
+ return reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner)
+ ->TranslateTimestamp(camera_time_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros()) *
+ rtc::kNumNanosecsPerMicrosec;
+}
+
+} // namespace jni
+} // namespace webrtc
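
Note: JNI_TimestampAligner_TranslateTimestamp() above works at microsecond resolution: the camera timestamp is divided by kNumNanosecsPerMicrosec (1000) on the way in and multiplied by it on the way out, so sub-microsecond precision is dropped by design. A worked standalone example of just the unit handling, assuming the aligner itself were the identity (it is not; it also corrects offset and drift against rtc::TimeMicros()):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t kNumNanosecsPerMicrosec = 1000;
      int64_t camera_time_ns = 1234567891;
      int64_t in_us = camera_time_ns / kNumNanosecsPerMicrosec;  // 1234567
      int64_t out_ns = in_us * kNumNanosecsPerMicrosec;          // 1234567000
      // The trailing 891 ns are lost in the round trip.
      std::printf("%lld us -> %lld ns\n", static_cast<long long>(in_us),
                  static_cast<long long>(out_ns));
      return 0;
    }
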
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc
new file mode 100644
index 0000000000..a218a1d23f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.cc
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_codec_info.h"
+
+#include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni,
+ const JavaRef<jobject>& j_info) {
+ return SdpVideoFormat(
+ JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)),
+ JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)));
+}
+
+ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
+ JNIEnv* jni,
+ const SdpVideoFormat& format) {
+ ScopedJavaLocalRef<jobject> j_params =
+ NativeToJavaStringMap(jni, format.parameters);
+ return Java_VideoCodecInfo_Constructor(
+ jni, NativeToJavaString(jni, format.name), j_params);
+}
+
+} // namespace jni
+} // namespace webrtc
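
Note: the two helpers above are effectively inverses; the codec name and the parameter map are the only state crossing the boundary. A hedged round-trip sketch (the H264 parameter values are illustrative only):

    #include <jni.h>

    #include "api/video_codecs/sdp_video_format.h"
    #include "sdk/android/src/jni/video_codec_info.h"

    void RoundTripExample(JNIEnv* env) {
      webrtc::SdpVideoFormat format(
          "H264",
          {{"profile-level-id", "42e01f"}, {"packetization-mode", "1"}});
      auto j_info = webrtc::jni::SdpVideoFormatToVideoCodecInfo(env, format);
      webrtc::SdpVideoFormat back =
          webrtc::jni::VideoCodecInfoToSdpVideoFormat(env, j_info);
      // `back` should compare equal to `format`: both the name and the
      // parameter map survive the trip through the Java VideoCodecInfo.
    }
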
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h
new file mode 100644
index 0000000000..07b073086a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_info.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
+
+#include <jni.h>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni,
+ const JavaRef<jobject>& info);
+ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
+ JNIEnv* jni,
+ const SdpVideoFormat& format);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_INFO_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc
new file mode 100644
index 0000000000..e34d6d69e2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_codec_status.h"
+
+#include "sdk/android/generated_video_jni/VideoCodecStatus_jni.h"
+
+namespace webrtc {
+namespace jni {
+
+int32_t JavaToNativeVideoCodecStatus(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_video_codec_status) {
+ return Java_VideoCodecStatus_getNumber(env, j_video_codec_status);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h
new file mode 100644
index 0000000000..607bd46340
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_codec_status.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
+
+#include <jni.h>
+#include <stdint.h>
+
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+int32_t JavaToNativeVideoCodecStatus(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_video_codec_status);
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_CODEC_STATUS_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc
new file mode 100644
index 0000000000..2d9240493a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.cc
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_decoder_factory_wrapper.h"
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_decoder.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/VideoDecoderFactory_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoDecoderFactoryWrapper::VideoDecoderFactoryWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& decoder_factory)
+ : decoder_factory_(jni, decoder_factory) {}
+VideoDecoderFactoryWrapper::~VideoDecoderFactoryWrapper() = default;
+
+std::unique_ptr<VideoDecoder> VideoDecoderFactoryWrapper::CreateVideoDecoder(
+ const SdpVideoFormat& format) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ ScopedJavaLocalRef<jobject> decoder = Java_VideoDecoderFactory_createDecoder(
+ jni, decoder_factory_, j_codec_info);
+ if (!decoder.obj())
+ return nullptr;
+ return JavaToNativeVideoDecoder(jni, decoder);
+}
+
+std::vector<SdpVideoFormat> VideoDecoderFactoryWrapper::GetSupportedFormats()
+ const {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ return JavaToNativeVector<SdpVideoFormat>(
+ env, Java_VideoDecoderFactory_getSupportedCodecs(env, decoder_factory_),
+ &VideoCodecInfoToSdpVideoFormat);
+}
+
+} // namespace jni
+} // namespace webrtc
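
Note: GetSupportedFormats() above leans on JavaToNativeVector taking a per-element converter (&VideoCodecInfoToSdpVideoFormat). A standalone sketch of that map-over-container shape, with plain C++ types standing in for JNIEnv and the Java array (all names hypothetical):

    #include <string>
    #include <vector>

    template <typename T, typename Container, typename Convert>
    std::vector<T> ToNativeVector(const Container& java_side, Convert convert) {
      std::vector<T> out;
      out.reserve(java_side.size());
      for (const auto& element : java_side)
        out.push_back(convert(element));  // One JNI conversion per element.
      return out;
    }

    int main() {
      std::vector<int> fake_java_objects = {1, 2, 3};
      auto names = ToNativeVector<std::string>(
          fake_java_objects, [](int v) { return std::to_string(v); });
      return names.size() == 3 ? 0 : 1;
    }
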
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h
new file mode 100644
index 0000000000..2122fdc008
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_factory_wrapper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
+
+#include <jni.h>
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for Java VideoDecoderFactory class. Delegates method calls through
+// JNI and wraps the decoder inside VideoDecoderWrapper.
+class VideoDecoderFactoryWrapper : public VideoDecoderFactory {
+ public:
+ VideoDecoderFactoryWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& decoder_factory);
+ ~VideoDecoderFactoryWrapper() override;
+
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+ std::unique_ptr<VideoDecoder> CreateVideoDecoder(
+ const SdpVideoFormat& format) override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> decoder_factory_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_FACTORY_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc
new file mode 100644
index 0000000000..a678280f69
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_fallback.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/video_codecs/video_decoder_software_fallback_wrapper.h"
+#include "sdk/android/generated_video_jni/VideoDecoderFallback_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_VideoDecoderFallback_CreateDecoder(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_fallback_decoder,
+ const JavaParamRef<jobject>& j_primary_decoder) {
+ std::unique_ptr<VideoDecoder> fallback_decoder =
+ JavaToNativeVideoDecoder(jni, j_fallback_decoder);
+ std::unique_ptr<VideoDecoder> primary_decoder =
+ JavaToNativeVideoDecoder(jni, j_primary_decoder);
+
+ VideoDecoder* nativeWrapper =
+ CreateVideoDecoderSoftwareFallbackWrapper(std::move(fallback_decoder),
+ std::move(primary_decoder))
+ .release();
+
+ return jlongFromPointer(nativeWrapper);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc
new file mode 100644
index 0000000000..328f8d8d4b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.cc
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_decoder_wrapper.h"
+
+#include "api/video/render_resolution.h"
+#include "api/video/video_frame.h"
+#include "api/video_codecs/video_decoder.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/numerics/safe_conversions.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoDecoderWrapper_jni.h"
+#include "sdk/android/generated_video_jni/VideoDecoder_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/encoded_image.h"
+#include "sdk/android/src/jni/video_codec_status.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+// RTP timestamps are 90 kHz: 90000 ticks/s / 1000 ms/s = 90 ticks/ms.
+const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
+
+template <typename Dst, typename Src>
+inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+ return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+ : absl::nullopt;
+}
+} // namespace
+
+VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& decoder)
+ : decoder_(jni, decoder),
+ implementation_name_(JavaToStdString(
+ jni,
+ Java_VideoDecoder_getImplementationName(jni, decoder))),
+ initialized_(false),
+      qp_parsing_enabled_(true)  // QP parsing starts enabled; it is disabled
+                                 // once the decoder provides QP values
+                                 // itself.
+{
+ decoder_thread_checker_.Detach();
+}
+
+VideoDecoderWrapper::~VideoDecoderWrapper() = default;
+
+bool VideoDecoderWrapper::Configure(const Settings& settings) {
+ RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ decoder_settings_ = settings;
+ return ConfigureInternal(jni);
+}
+
+bool VideoDecoderWrapper::ConfigureInternal(JNIEnv* jni) {
+ RenderResolution resolution = decoder_settings_.max_render_resolution();
+ ScopedJavaLocalRef<jobject> settings =
+ Java_Settings_Constructor(jni, decoder_settings_.number_of_cores(),
+ resolution.Width(), resolution.Height());
+
+ ScopedJavaLocalRef<jobject> callback =
+ Java_VideoDecoderWrapper_createDecoderCallback(jni,
+ jlongFromPointer(this));
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoDecoder_initDecode(jni, decoder_, settings, callback));
+ RTC_LOG(LS_INFO) << "initDecode: " << status;
+ if (status == WEBRTC_VIDEO_CODEC_OK) {
+ initialized_ = true;
+ }
+
+  // The decoder was (re)initialized, so re-enable QP parsing in case the
+  // decoder does not provide QP values itself.
+ qp_parsing_enabled_ = true;
+
+ return status == WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Decode(
+ const EncodedImage& image_param,
+ bool missing_frames,
+ int64_t render_time_ms) {
+ RTC_DCHECK_RUN_ON(&decoder_thread_checker_);
+ if (!initialized_) {
+ // Most likely initializing the codec failed.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Make a mutable copy so we can modify the timestamp.
+ EncodedImage input_image(image_param);
+  // Use the RTP timestamp for capture time; capture_time_ms_ is always 0.
+ input_image.capture_time_ms_ =
+ input_image.Timestamp() / kNumRtpTicksPerMillisec;
+
+ FrameExtraInfo frame_extra_info;
+ frame_extra_info.timestamp_ns =
+ input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
+ frame_extra_info.timestamp_rtp = input_image.Timestamp();
+ frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
+ frame_extra_info.qp =
+ qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.push_back(frame_extra_info);
+ }
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> jinput_image =
+ NativeToJavaEncodedImage(env, input_image);
+ ScopedJavaLocalRef<jobject> decode_info;
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoDecoder_decode(env, decoder_, jinput_image, decode_info);
+ return HandleReturnCode(env, ret, "decode");
+}
+
+int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoDecoderWrapper::Release() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoDecoder_release(jni, decoder_));
+ RTC_LOG(LS_INFO) << "release: " << status;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.clear();
+ }
+ initialized_ = false;
+ // It is allowed to reinitialize the codec on a different thread.
+ decoder_thread_checker_.Detach();
+ return status;
+}
+
+const char* VideoDecoderWrapper::ImplementationName() const {
+ return implementation_name_.c_str();
+}
+
+void VideoDecoderWrapper::OnDecodedFrame(
+ JNIEnv* env,
+ const JavaRef<jobject>& j_frame,
+ const JavaRef<jobject>& j_decode_time_ms,
+ const JavaRef<jobject>& j_qp) {
+ RTC_DCHECK_RUNS_SERIALIZED(&callback_race_checker_);
+ const int64_t timestamp_ns = GetJavaVideoFrameTimestampNs(env, j_frame);
+
+ FrameExtraInfo frame_extra_info;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+
+ do {
+ if (frame_extra_infos_.empty()) {
+ RTC_LOG(LS_WARNING)
+ << "Java decoder produced an unexpected frame: " << timestamp_ns;
+ return;
+ }
+
+ frame_extra_info = frame_extra_infos_.front();
+ frame_extra_infos_.pop_front();
+      // The decoder may drop frames, so iterate through the queue until we
+      // find a matching timestamp.
+ } while (frame_extra_info.timestamp_ns != timestamp_ns);
+ }
+
+ VideoFrame frame =
+ JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
+ frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
+
+ absl::optional<int32_t> decoding_time_ms =
+ JavaToNativeOptionalInt(env, j_decode_time_ms);
+
+ absl::optional<uint8_t> decoder_qp =
+ cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
+  // If the decoder provides QP values itself there is no need to parse the
+  // bitstream; QP parsing stays enabled only while it does not.
+ qp_parsing_enabled_ = !decoder_qp.has_value();
+ callback_->Decoded(frame, decoding_time_ms,
+ decoder_qp ? decoder_qp : frame_extra_info.qp);
+}
+
+VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo() = default;
+VideoDecoderWrapper::FrameExtraInfo::FrameExtraInfo(const FrameExtraInfo&) =
+ default;
+VideoDecoderWrapper::FrameExtraInfo::~FrameExtraInfo() = default;
+
+int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name) {
+ int32_t value = JavaToNativeVideoCodecStatus(jni, j_value);
+ if (value >= 0) { // OK or NO_OUTPUT
+ return value;
+ }
+
+ RTC_LOG(LS_WARNING) << method_name << ": " << value;
+ if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE ||
+ value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error.
+ RTC_LOG(LS_WARNING) << "Java decoder requested software fallback.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Try resetting the codec.
+ if (Release() == WEBRTC_VIDEO_CODEC_OK && ConfigureInternal(jni)) {
+ RTC_LOG(LS_WARNING) << "Reset Java decoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ RTC_LOG(LS_WARNING) << "Unable to reset Java decoder.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+}
+
+absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
+ const EncodedImage& input_image) {
+ if (input_image.qp_ != -1) {
+ return input_image.qp_;
+ }
+
+ absl::optional<uint8_t> qp;
+ switch (decoder_settings_.codec_type()) {
+ case kVideoCodecVP8: {
+ int qp_int;
+ if (vp8::GetQp(input_image.data(), input_image.size(), &qp_int)) {
+ qp = qp_int;
+ }
+ break;
+ }
+ case kVideoCodecVP9: {
+ int qp_int;
+ if (vp9::GetQp(input_image.data(), input_image.size(), &qp_int)) {
+ qp = qp_int;
+ }
+ break;
+ }
+ case kVideoCodecH264: {
+ h264_bitstream_parser_.ParseBitstream(input_image);
+ qp = h264_bitstream_parser_.GetLastSliceQp();
+ break;
+ }
+ default:
+ break; // Default is to not provide QP.
+ }
+ return qp;
+}
+
+std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder) {
+ const jlong native_decoder =
+ Java_VideoDecoder_createNativeVideoDecoder(jni, j_decoder);
+ VideoDecoder* decoder;
+ if (native_decoder == 0) {
+ decoder = new VideoDecoderWrapper(jni, j_decoder);
+ } else {
+ decoder = reinterpret_cast<VideoDecoder*>(native_decoder);
+ }
+ return std::unique_ptr<VideoDecoder>(decoder);
+}
+
+} // namespace jni
+} // namespace webrtc
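
Note: Decode() and OnDecodedFrame() above pair requests with results through frame_extra_infos_, keyed by a nanosecond timestamp derived from the 90 kHz RTP timestamp. A worked standalone example of that derivation:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t kNumRtpTicksPerMillisec = 90;  // 90000 / 1000
      const int64_t kNumNanosecsPerMillisec = 1000000;
      uint32_t rtp_timestamp = 90000;  // Exactly one second of RTP ticks.
      int64_t capture_time_ms = rtp_timestamp / kNumRtpTicksPerMillisec;
      int64_t timestamp_ns = capture_time_ms * kNumNanosecsPerMillisec;
      assert(capture_time_ms == 1000);
      assert(timestamp_ns == 1000000000);  // The key matched in the deque.
      return 0;
    }
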
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h
new file mode 100644
index 0000000000..49d0fbf048
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_decoder_wrapper.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
+
+#include <jni.h>
+
+#include <atomic>
+#include <deque>
+
+#include "api/sequence_checker.h"
+#include "api/video_codecs/video_decoder.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "rtc_base/race_checker.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wraps a Java decoder and delegates all calls to it.
+class VideoDecoderWrapper : public VideoDecoder {
+ public:
+ VideoDecoderWrapper(JNIEnv* jni, const JavaRef<jobject>& decoder);
+ ~VideoDecoderWrapper() override;
+
+ bool Configure(const Settings& settings) override;
+
+ int32_t Decode(const EncodedImage& input_image,
+ bool missing_frames,
+ int64_t render_time_ms) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) override;
+
+ // TODO(sakal): This is not always called on the correct thread. It is called
+ // from VCMGenericDecoder destructor which is on a different thread but is
+ // still safe and synchronous.
+ int32_t Release() override RTC_NO_THREAD_SAFETY_ANALYSIS;
+
+ const char* ImplementationName() const override;
+
+  // Wraps the frame in an AndroidVideoBuffer and passes it to the callback.
+ void OnDecodedFrame(JNIEnv* env,
+ const JavaRef<jobject>& j_frame,
+ const JavaRef<jobject>& j_decode_time_ms,
+ const JavaRef<jobject>& j_qp);
+
+ private:
+ struct FrameExtraInfo {
+ int64_t timestamp_ns; // Used as an identifier of the frame.
+
+ uint32_t timestamp_rtp;
+ int64_t timestamp_ntp;
+ absl::optional<uint8_t> qp;
+
+ FrameExtraInfo();
+ FrameExtraInfo(const FrameExtraInfo&);
+ ~FrameExtraInfo();
+ };
+
+ bool ConfigureInternal(JNIEnv* jni) RTC_RUN_ON(decoder_thread_checker_);
+
+ // Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_*
+ // status code.
+ int32_t HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name)
+ RTC_RUN_ON(decoder_thread_checker_);
+
+ absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
+ RTC_RUN_ON(decoder_thread_checker_);
+
+ const ScopedJavaGlobalRef<jobject> decoder_;
+ const std::string implementation_name_;
+
+ SequenceChecker decoder_thread_checker_;
+ // Callbacks must be executed sequentially on an arbitrary thread. We do not
+ // own this thread so a thread checker cannot be used.
+ rtc::RaceChecker callback_race_checker_;
+
+ // Initialized on Configure and immutable after that.
+ VideoDecoder::Settings decoder_settings_
+ RTC_GUARDED_BY(decoder_thread_checker_);
+
+ bool initialized_ RTC_GUARDED_BY(decoder_thread_checker_);
+ H264BitstreamParser h264_bitstream_parser_
+ RTC_GUARDED_BY(decoder_thread_checker_);
+
+ DecodedImageCallback* callback_ RTC_GUARDED_BY(callback_race_checker_);
+
+ // Accessed both on the decoder thread and the callback thread.
+ std::atomic<bool> qp_parsing_enabled_;
+ Mutex frame_extra_infos_lock_;
+ std::deque<FrameExtraInfo> frame_extra_infos_
+ RTC_GUARDED_BY(frame_extra_infos_lock_);
+};
+
+/* If the j_decoder is a wrapped native decoder, unwrap it. If it is not,
+ * wrap it in a VideoDecoderWrapper.
+ */
+std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_decoder);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_DECODER_WRAPPER_H_
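
Note: JavaToNativeVideoDecoder() (declared above, defined in video_decoder_wrapper.cc) picks one of two ownership paths: a non-zero handle from createNativeVideoDecoder() means the Java object is a thin shell around a native decoder, which is unwrapped directly; zero means a genuine Java implementation that must be wrapped. A standalone sketch of the decision (Decoder and both concrete names are hypothetical):

    #include <cstdint>
    #include <memory>

    struct Decoder {
      virtual ~Decoder() = default;
    };
    struct WrappedJavaDecoder : Decoder {};  // Delegates via JNI in reality.

    std::unique_ptr<Decoder> UnwrapOrWrap(intptr_t native_handle) {
      if (native_handle == 0)
        return std::make_unique<WrappedJavaDecoder>();
      // Take ownership of the native decoder the Java shell handed back.
      return std::unique_ptr<Decoder>(
          reinterpret_cast<Decoder*>(native_handle));
    }
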
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc
new file mode 100644
index 0000000000..7df129b360
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.cc
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+
+#include "api/video/render_resolution.h"
+#include "api/video_codecs/video_encoder.h"
+#include "rtc_base/logging.h"
+#include "sdk/android/generated_video_jni/VideoEncoderFactory_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+namespace {
+class VideoEncoderSelectorWrapper
+ : public VideoEncoderFactory::EncoderSelectorInterface {
+ public:
+ VideoEncoderSelectorWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& encoder_selector)
+ : encoder_selector_(jni, encoder_selector) {}
+
+ void OnCurrentEncoder(const SdpVideoFormat& format) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ Java_VideoEncoderSelector_onCurrentEncoder(jni, encoder_selector_,
+ j_codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnAvailableBitrate(
+ const DataRate& rate) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onAvailableBitrate(jni, encoder_selector_,
+ rate.kbps<int>());
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnResolutionChange(
+ const RenderResolution& resolution) override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onResolutionChange(
+ jni, encoder_selector_, resolution.Width(), resolution.Height());
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ absl::optional<SdpVideoFormat> OnEncoderBroken() override {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> codec_info =
+ Java_VideoEncoderSelector_onEncoderBroken(jni, encoder_selector_);
+ if (codec_info.is_null()) {
+ return absl::nullopt;
+ }
+ return VideoCodecInfoToSdpVideoFormat(jni, codec_info);
+ }
+
+ private:
+ const ScopedJavaGlobalRef<jobject> encoder_selector_;
+};
+
+} // namespace
+
+VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper(
+ JNIEnv* jni,
+ const JavaRef<jobject>& encoder_factory)
+ : encoder_factory_(jni, encoder_factory) {
+ const ScopedJavaLocalRef<jobjectArray> j_supported_codecs =
+ Java_VideoEncoderFactory_getSupportedCodecs(jni, encoder_factory);
+ supported_formats_ = JavaToNativeVector<SdpVideoFormat>(
+ jni, j_supported_codecs, &VideoCodecInfoToSdpVideoFormat);
+ const ScopedJavaLocalRef<jobjectArray> j_implementations =
+ Java_VideoEncoderFactory_getImplementations(jni, encoder_factory);
+ implementations_ = JavaToNativeVector<SdpVideoFormat>(
+ jni, j_implementations, &VideoCodecInfoToSdpVideoFormat);
+}
+VideoEncoderFactoryWrapper::~VideoEncoderFactoryWrapper() = default;
+
+std::unique_ptr<VideoEncoder> VideoEncoderFactoryWrapper::CreateVideoEncoder(
+ const SdpVideoFormat& format) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_codec_info =
+ SdpVideoFormatToVideoCodecInfo(jni, format);
+ ScopedJavaLocalRef<jobject> encoder = Java_VideoEncoderFactory_createEncoder(
+ jni, encoder_factory_, j_codec_info);
+ if (!encoder.obj())
+ return nullptr;
+ return JavaToNativeVideoEncoder(jni, encoder);
+}
+
+std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetSupportedFormats()
+ const {
+ return supported_formats_;
+}
+
+std::vector<SdpVideoFormat> VideoEncoderFactoryWrapper::GetImplementations()
+ const {
+ return implementations_;
+}
+
+std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
+VideoEncoderFactoryWrapper::GetEncoderSelector() const {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> selector =
+ Java_VideoEncoderFactory_getEncoderSelector(jni, encoder_factory_);
+ if (selector.is_null()) {
+ return nullptr;
+ }
+
+ return std::make_unique<VideoEncoderSelectorWrapper>(jni, selector);
+}
+
+} // namespace jni
+} // namespace webrtc
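
Note: all four selector callbacks above share one convention: a null VideoCodecInfo from Java means "no preference" and maps to absl::nullopt; anything else is converted to an SdpVideoFormat. A standalone sketch of that convention, with std::optional and std::string standing in for absl::optional and the Java object:

    #include <optional>
    #include <string>

    std::optional<std::string> ToNativeFormat(const std::string* j_codec_info) {
      if (j_codec_info == nullptr)  // Java returned null: keep current encoder.
        return std::nullopt;
      // Otherwise convert, as VideoCodecInfoToSdpVideoFormat does.
      return *j_codec_info;
    }
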
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h
new file mode 100644
index 0000000000..2be6b1b33f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_factory_wrapper.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
+
+#include <jni.h>
+#include <vector>
+
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wrapper for Java VideoEncoderFactory class. Delegates method calls through
+// JNI and wraps the encoder inside VideoEncoderWrapper.
+class VideoEncoderFactoryWrapper : public VideoEncoderFactory {
+ public:
+ VideoEncoderFactoryWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& encoder_factory);
+ ~VideoEncoderFactoryWrapper() override;
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(
+ const SdpVideoFormat& format) override;
+
+ // Returns a list of supported codecs in order of preference.
+ std::vector<SdpVideoFormat> GetSupportedFormats() const override;
+
+ std::vector<SdpVideoFormat> GetImplementations() const override;
+
+ std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;
+
+ private:
+ const ScopedJavaGlobalRef<jobject> encoder_factory_;
+ std::vector<SdpVideoFormat> supported_formats_;
+ std::vector<SdpVideoFormat> implementations_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_FACTORY_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc
new file mode 100644
index 0000000000..d581572abf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_fallback.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
+#include "sdk/android/generated_video_jni/VideoEncoderFallback_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_VideoEncoderFallback_CreateEncoder(
+ JNIEnv* jni,
+ const JavaParamRef<jobject>& j_fallback_encoder,
+ const JavaParamRef<jobject>& j_primary_encoder) {
+ std::unique_ptr<VideoEncoder> fallback_encoder =
+ JavaToNativeVideoEncoder(jni, j_fallback_encoder);
+ std::unique_ptr<VideoEncoder> primary_encoder =
+ JavaToNativeVideoEncoder(jni, j_primary_encoder);
+
+ VideoEncoder* nativeWrapper =
+ CreateVideoEncoderSoftwareFallbackWrapper(std::move(fallback_encoder),
+ std::move(primary_encoder))
+ .release();
+
+ return jlongFromPointer(nativeWrapper);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc
new file mode 100644
index 0000000000..c23ab1e485
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.cc
@@ -0,0 +1,490 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+
+#include <utility>
+
+#include "common_video/h264/h264_common.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
+#include "modules/video_coding/svc/scalable_video_controller_no_layering.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h"
+#include "sdk/android/generated_video_jni/VideoEncoder_jni.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/encoded_image.h"
+#include "sdk/android/src/jni/video_codec_status.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder)
+ : encoder_(jni, j_encoder), int_array_class_(GetClass(jni, "[I")) {
+ initialized_ = false;
+ num_resets_ = 0;
+
+ // Fetch and update encoder info.
+ UpdateEncoderInfo(jni);
+}
+VideoEncoderWrapper::~VideoEncoderWrapper() = default;
+
+int VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ codec_settings_ = *codec_settings;
+ capabilities_ = settings.capabilities;
+ number_of_cores_ = settings.number_of_cores;
+ num_resets_ = 0;
+
+ return InitEncodeInternal(jni);
+}
+
+int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
+ bool automatic_resize_on;
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
+ break;
+ case kVideoCodecVP9:
+ automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
+ gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
+ gof_idx_ = 0;
+ break;
+ default:
+ automatic_resize_on = true;
+ }
+
+ RTC_DCHECK(capabilities_);
+ ScopedJavaLocalRef<jobject> capabilities =
+ Java_Capabilities_Constructor(jni, capabilities_->loss_notification);
+
+ ScopedJavaLocalRef<jobject> settings = Java_Settings_Constructor(
+ jni, number_of_cores_, codec_settings_.width, codec_settings_.height,
+ static_cast<int>(codec_settings_.startBitrate),
+ static_cast<int>(codec_settings_.maxFramerate),
+ static_cast<int>(codec_settings_.numberOfSimulcastStreams),
+ automatic_resize_on, capabilities);
+
+ ScopedJavaLocalRef<jobject> callback =
+ Java_VideoEncoderWrapper_createEncoderCallback(jni,
+ jlongFromPointer(this));
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoEncoder_initEncode(jni, encoder_, settings, callback));
+ RTC_LOG(LS_INFO) << "initEncode: " << status;
+
+  // Some encoder properties depend on the settings and may change after
+  // initialization.
+ UpdateEncoderInfo(jni);
+
+ if (status == WEBRTC_VIDEO_CODEC_OK) {
+ initialized_ = true;
+ }
+ return status;
+}
+
+void VideoEncoderWrapper::UpdateEncoderInfo(JNIEnv* jni) {
+ encoder_info_.supports_native_handle = true;
+
+ encoder_info_.implementation_name = JavaToStdString(
+ jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
+
+ encoder_info_.is_hardware_accelerated =
+ Java_VideoEncoder_isHardwareEncoder(jni, encoder_);
+
+ encoder_info_.scaling_settings = GetScalingSettingsInternal(jni);
+
+ encoder_info_.resolution_bitrate_limits = JavaToNativeResolutionBitrateLimits(
+ jni, Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_));
+
+ EncoderInfo info = GetEncoderInfoInternal(jni);
+ encoder_info_.requested_resolution_alignment =
+ info.requested_resolution_alignment;
+ encoder_info_.apply_alignment_to_all_simulcast_layers =
+ info.apply_alignment_to_all_simulcast_layers;
+}
+
+int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t VideoEncoderWrapper::Release() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ int32_t status = JavaToNativeVideoCodecStatus(
+ jni, Java_VideoEncoder_release(jni, encoder_));
+ RTC_LOG(LS_INFO) << "release: " << status;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.clear();
+ }
+ initialized_ = false;
+
+ return status;
+}
+
+int32_t VideoEncoderWrapper::Encode(
+ const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) {
+ if (!initialized_) {
+ // Most likely initializing the codec failed.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ // Construct encode info.
+ ScopedJavaLocalRef<jobjectArray> j_frame_types =
+ NativeToJavaFrameTypeArray(jni, *frame_types);
+ ScopedJavaLocalRef<jobject> encode_info =
+ Java_EncodeInfo_Constructor(jni, j_frame_types);
+
+ FrameExtraInfo info;
+ info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
+ info.timestamp_rtp = frame.timestamp();
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ frame_extra_infos_.push_back(info);
+ }
+
+ ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoEncoder_encode(jni, encoder_, j_frame, encode_info);
+ ReleaseJavaVideoFrame(jni, j_frame);
+ return HandleReturnCode(jni, ret, "encode");
+}
+
+void VideoEncoderWrapper::SetRates(
+    const RateControlParameters& rc_parameters) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ ScopedJavaLocalRef<jobject> j_rc_parameters =
+ ToJavaRateControlParameters(jni, rc_parameters);
+ ScopedJavaLocalRef<jobject> ret =
+ Java_VideoEncoder_setRates(jni, encoder_, j_rc_parameters);
+ HandleReturnCode(jni, ret, "setRates");
+}
+
+VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfo() const {
+ return encoder_info_;
+}
+
+VideoEncoderWrapper::ScalingSettings
+VideoEncoderWrapper::GetScalingSettingsInternal(JNIEnv* jni) const {
+ ScopedJavaLocalRef<jobject> j_scaling_settings =
+ Java_VideoEncoder_getScalingSettings(jni, encoder_);
+ bool isOn =
+ Java_VideoEncoderWrapper_getScalingSettingsOn(jni, j_scaling_settings);
+
+ if (!isOn)
+ return ScalingSettings::kOff;
+
+ absl::optional<int> low = JavaToNativeOptionalInt(
+ jni,
+ Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
+ absl::optional<int> high = JavaToNativeOptionalInt(
+ jni,
+ Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));
+
+ if (low && high)
+ return ScalingSettings(*low, *high);
+
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8: {
+ // Same as in vp8_impl.cc.
+ static const int kLowVp8QpThreshold = 29;
+ static const int kHighVp8QpThreshold = 95;
+ return ScalingSettings(low.value_or(kLowVp8QpThreshold),
+ high.value_or(kHighVp8QpThreshold));
+ }
+ case kVideoCodecVP9: {
+      // QP is obtained from the VP9 bitstream, so it corresponds to the
+      // bitstream range of [0, 255] and not the user-level range of [0, 63].
+ static const int kLowVp9QpThreshold = 96;
+ static const int kHighVp9QpThreshold = 185;
+
+ return VideoEncoder::ScalingSettings(kLowVp9QpThreshold,
+ kHighVp9QpThreshold);
+ }
+ case kVideoCodecH264: {
+ // Same as in h264_encoder_impl.cc.
+ static const int kLowH264QpThreshold = 24;
+ static const int kHighH264QpThreshold = 37;
+ return ScalingSettings(low.value_or(kLowH264QpThreshold),
+ high.value_or(kHighH264QpThreshold));
+ }
+ default:
+ return ScalingSettings::kOff;
+ }
+}
+
+VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfoInternal(
+ JNIEnv* jni) const {
+ ScopedJavaLocalRef<jobject> j_encoder_info =
+ Java_VideoEncoder_getEncoderInfo(jni, encoder_);
+
+ jint requested_resolution_alignment =
+ Java_EncoderInfo_getRequestedResolutionAlignment(jni, j_encoder_info);
+
+ jboolean apply_alignment_to_all_simulcast_layers =
+ Java_EncoderInfo_getApplyAlignmentToAllSimulcastLayers(jni,
+ j_encoder_info);
+
+ VideoEncoder::EncoderInfo info;
+ info.requested_resolution_alignment = requested_resolution_alignment;
+ info.apply_alignment_to_all_simulcast_layers =
+ apply_alignment_to_all_simulcast_layers;
+
+ return info;
+}
+
+void VideoEncoderWrapper::OnEncodedFrame(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image) {
+ EncodedImage frame = JavaToNativeEncodedImage(jni, j_encoded_image);
+ int64_t capture_time_ns =
+ GetJavaEncodedImageCaptureTimeNs(jni, j_encoded_image);
+
+ // Encoded frames are delivered in the order received, but some of them
+ // may be dropped, so remove records of frames older than the current
+ // one.
+ //
+ // NOTE: if the current frame is associated with Encoder A, in the time
+ // since this frame was received, Encoder A could have been
+ // Release()'ed, Encoder B InitEncode()'ed (due to reuse of Encoder A),
+ // and frames received by Encoder B. Thus there may be frame_extra_infos
+ // entries that don't belong to us, and we need to be careful not to
+ // remove them. Removing only those entries older than the current frame
+ // provides this guarantee.
+ FrameExtraInfo frame_extra_info;
+ {
+ MutexLock lock(&frame_extra_infos_lock_);
+ while (!frame_extra_infos_.empty() &&
+ frame_extra_infos_.front().capture_time_ns < capture_time_ns) {
+ frame_extra_infos_.pop_front();
+ }
+ if (frame_extra_infos_.empty() ||
+ frame_extra_infos_.front().capture_time_ns != capture_time_ns) {
+ RTC_LOG(LS_WARNING)
+ << "Java encoder produced an unexpected frame with timestamp: "
+ << capture_time_ns;
+ return;
+ }
+ frame_extra_info = frame_extra_infos_.front();
+ frame_extra_infos_.pop_front();
+ }
+
+  // This is a bit subtle: `frame` must be treated as const here. This
+  // implies that (i) we need to make a copy to be able to write to the
+  // metadata, and (ii) we should avoid using the .data() method (including
+  // implicit conversion to ArrayView) on the non-const copy, since that
+  // would trigger a copy operation on the underlying CopyOnWriteBuffer.
+ EncodedImage frame_copy = frame;
+
+ frame_copy.SetTimestamp(frame_extra_info.timestamp_rtp);
+ frame_copy.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
+
+ if (frame_copy.qp_ < 0)
+ frame_copy.qp_ = ParseQp(frame);
+
+ CodecSpecificInfo info(ParseCodecSpecificInfo(frame));
+
+ callback_->OnEncodedImage(frame_copy, &info);
+}
+
+int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name) {
+ int32_t value = JavaToNativeVideoCodecStatus(jni, j_value);
+ if (value >= 0) { // OK or NO_OUTPUT
+ return value;
+ }
+
+ RTC_LOG(LS_WARNING) << method_name << ": " << value;
+ if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE ||
+ value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) { // Critical error.
+ RTC_LOG(LS_WARNING) << "Java encoder requested software fallback.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ // Try resetting the codec.
+ if (Release() == WEBRTC_VIDEO_CODEC_OK &&
+ InitEncodeInternal(jni) == WEBRTC_VIDEO_CODEC_OK) {
+ RTC_LOG(LS_WARNING) << "Reset Java encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ RTC_LOG(LS_WARNING) << "Unable to reset Java encoder.";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+}
+
+int VideoEncoderWrapper::ParseQp(rtc::ArrayView<const uint8_t> buffer) {
+ int qp;
+ bool success;
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ success = vp8::GetQp(buffer.data(), buffer.size(), &qp);
+ break;
+ case kVideoCodecVP9:
+ success = vp9::GetQp(buffer.data(), buffer.size(), &qp);
+ break;
+ case kVideoCodecH264:
+ h264_bitstream_parser_.ParseBitstream(buffer);
+ qp = h264_bitstream_parser_.GetLastSliceQp().value_or(-1);
+ success = (qp >= 0);
+ break;
+ default: // Default is to not provide QP.
+ success = false;
+ break;
+ }
+ return success ? qp : -1; // -1 means unknown QP.
+}
+
+CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
+ const EncodedImage& frame) {
+ const bool key_frame = frame._frameType == VideoFrameType::kVideoFrameKey;
+
+ CodecSpecificInfo info;
+  // For a stream with scalability, NextFrameConfig should be called before
+  // encoding and used to configure the encoder, then passed here, e.g. via
+  // the FrameExtraInfo structure. But since this encoder wrapper uses only
+  // trivial scalability, NextFrameConfig can be called here instead.
+ auto layer_frames = svc_controller_.NextFrameConfig(/*reset=*/key_frame);
+ RTC_DCHECK_EQ(layer_frames.size(), 1);
+ info.generic_frame_info = svc_controller_.OnEncodeDone(layer_frames[0]);
+ if (key_frame) {
+ info.template_structure = svc_controller_.DependencyStructure();
+ info.template_structure->resolutions = {
+ RenderResolution(frame._encodedWidth, frame._encodedHeight)};
+ }
+
+ info.codecType = codec_settings_.codecType;
+
+ switch (codec_settings_.codecType) {
+ case kVideoCodecVP8:
+ info.codecSpecific.VP8.nonReference = false;
+ info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
+ info.codecSpecific.VP8.layerSync = false;
+ info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
+ break;
+ case kVideoCodecVP9:
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+      info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+      info.codecSpecific.VP9.flexible_mode = false;
+      info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.first_frame_in_picture = true;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = frame._encodedWidth;
+ info.codecSpecific.VP9.height[0] = frame._encodedHeight;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return info;
+}
+
+ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaBitrateAllocation(
+ JNIEnv* jni,
+ const VideoBitrateAllocation& allocation) {
+ ScopedJavaLocalRef<jobjectArray> j_allocation_array(
+ jni, jni->NewObjectArray(kMaxSpatialLayers, int_array_class_.obj(),
+ nullptr /* initial */));
+ for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
+ std::array<int32_t, kMaxTemporalStreams> spatial_layer;
+ for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
+ spatial_layer[temporal_i] = allocation.GetBitrate(spatial_i, temporal_i);
+ }
+
+ ScopedJavaLocalRef<jintArray> j_array_spatial_layer =
+ NativeToJavaIntArray(jni, spatial_layer);
+ jni->SetObjectArrayElement(j_allocation_array.obj(), spatial_i,
+ j_array_spatial_layer.obj());
+ }
+ return Java_BitrateAllocation_Constructor(jni, j_allocation_array);
+}
+
+ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaRateControlParameters(
+ JNIEnv* jni,
+ const VideoEncoder::RateControlParameters& rc_parameters) {
+ ScopedJavaLocalRef<jobject> j_bitrate_allocation =
+ ToJavaBitrateAllocation(jni, rc_parameters.bitrate);
+
+ return Java_RateControlParameters_Constructor(jni, j_bitrate_allocation,
+ rc_parameters.framerate_fps);
+}
+
+std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder) {
+ const jlong native_encoder =
+ Java_VideoEncoder_createNativeVideoEncoder(jni, j_encoder);
+ VideoEncoder* encoder;
+ if (native_encoder == 0) {
+ encoder = new VideoEncoderWrapper(jni, j_encoder);
+ } else {
+ encoder = reinterpret_cast<VideoEncoder*>(native_encoder);
+ }
+ return std::unique_ptr<VideoEncoder>(encoder);
+}
+
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(
+ JNIEnv* jni,
+ const JavaRef<jobjectArray>& j_bitrate_limits_array) {
+ std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits;
+
+ const jsize array_length = jni->GetArrayLength(j_bitrate_limits_array.obj());
+ for (int i = 0; i < array_length; ++i) {
+ ScopedJavaLocalRef<jobject> j_bitrate_limits = ScopedJavaLocalRef<jobject>(
+ jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i));
+
+ jint frame_size_pixels =
+ Java_ResolutionBitrateLimits_getFrameSizePixels(jni, j_bitrate_limits);
+ jint min_start_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni,
+ j_bitrate_limits);
+ jint min_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits);
+ jint max_bitrate_bps =
+ Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits);
+
+ resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits(
+ frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps,
+ max_bitrate_bps));
+ }
+
+ return resolution_bitrate_limits;
+}
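+// Editor's note: a sketch, not part of the patch, making the field order of
+// the native struct explicit, since all four accessors above return jint.
+// The example values are hypothetical:
+//
+//   VideoEncoder::ResolutionBitrateLimits limits(
+//       /*frame_size_pixels=*/1280 * 720,
+//       /*min_start_bitrate_bps=*/300000,
+//       /*min_bitrate_bps=*/30000,
+//       /*max_bitrate_bps=*/2500000);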
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h
new file mode 100644
index 0000000000..5c5aab7588
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_encoder_wrapper.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
+
+#include <jni.h>
+
+#include <deque>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/video_codecs/video_encoder.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
+#include "modules/video_coding/svc/scalable_video_controller_no_layering.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// Wraps a Java encoder and delegates all calls to it.
+class VideoEncoderWrapper : public VideoEncoder {
+ public:
+ VideoEncoderWrapper(JNIEnv* jni, const JavaRef<jobject>& j_encoder);
+ ~VideoEncoderWrapper() override;
+
+ int32_t InitEncode(const VideoCodec* codec_settings,
+ const Settings& settings) override;
+
+ int32_t RegisterEncodeCompleteCallback(
+ EncodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ int32_t Encode(const VideoFrame& frame,
+ const std::vector<VideoFrameType>* frame_types) override;
+
+ void SetRates(const RateControlParameters& rc_parameters) override;
+
+ EncoderInfo GetEncoderInfo() const override;
+
+ // Should only be called by JNI.
+ void OnEncodedFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_encoded_image);
+
+ private:
+ struct FrameExtraInfo {
+ int64_t capture_time_ns; // Used as an identifier of the frame.
+
+ uint32_t timestamp_rtp;
+ };
+
+ int32_t InitEncodeInternal(JNIEnv* jni);
+
+ // Takes a Java VideoCodecStatus, handles it, and returns a
+ // WEBRTC_VIDEO_CODEC_* status code.
+ int32_t HandleReturnCode(JNIEnv* jni,
+ const JavaRef<jobject>& j_value,
+ const char* method_name);
+
+ int ParseQp(rtc::ArrayView<const uint8_t> buffer);
+
+ CodecSpecificInfo ParseCodecSpecificInfo(const EncodedImage& frame);
+
+ ScopedJavaLocalRef<jobject> ToJavaBitrateAllocation(
+ JNIEnv* jni,
+ const VideoBitrateAllocation& allocation);
+
+ ScopedJavaLocalRef<jobject> ToJavaRateControlParameters(
+ JNIEnv* jni,
+ const VideoEncoder::RateControlParameters& rc_parameters);
+
+ void UpdateEncoderInfo(JNIEnv* jni);
+
+ ScalingSettings GetScalingSettingsInternal(JNIEnv* jni) const;
+ std::vector<ResolutionBitrateLimits> GetResolutionBitrateLimits(
+ JNIEnv* jni) const;
+
+ VideoEncoder::EncoderInfo GetEncoderInfoInternal(JNIEnv* jni) const;
+
+ const ScopedJavaGlobalRef<jobject> encoder_;
+ const ScopedJavaGlobalRef<jclass> int_array_class_;
+
+ // Modified both on the encoder thread and the callback thread.
+ Mutex frame_extra_infos_lock_;
+ std::deque<FrameExtraInfo> frame_extra_infos_
+ RTC_GUARDED_BY(frame_extra_infos_lock_);
+ EncodedImageCallback* callback_;
+ bool initialized_;
+ int num_resets_;
+ absl::optional<VideoEncoder::Capabilities> capabilities_;
+ int number_of_cores_;
+ VideoCodec codec_settings_;
+ EncoderInfo encoder_info_;
+ H264BitstreamParser h264_bitstream_parser_;
+
+ // Fills in frame dependencies in a codec-agnostic format.
+ ScalableVideoControllerNoLayering svc_controller_;
+ // VP9 variables to populate codec specific structure.
+ GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ size_t gof_idx_;
+};
+
+// If the j_encoder wraps a native encoder, unwrap it; otherwise, wrap it in
+// a VideoEncoderWrapper.
+std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_encoder);
+
+bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef<jobject>& j_encoder);
+
+std::vector<VideoEncoder::ResolutionBitrateLimits>
+JavaToNativeResolutionBitrateLimits(
+ JNIEnv* jni,
+ const JavaRef<jobjectArray>& j_bitrate_limits_array);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_ENCODER_WRAPPER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc
new file mode 100644
index 0000000000..121b34fa94
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_frame.cc
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_frame.h"
+
+#include "api/scoped_refptr.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_video_jni/VideoFrame_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+namespace webrtc {
+namespace jni {
+
+namespace {
+
+class AndroidVideoBuffer : public VideoFrameBuffer {
+ public:
+ // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
+ static rtc::scoped_refptr<AndroidVideoBuffer> Create(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ // Similar to the Create() above, but adopts and takes ownership of the Java
+ // VideoFrame.Buffer. I.e. retain() will not be called, but release() will be
+ // called when the returned AndroidVideoBuffer is destroyed.
+ static rtc::scoped_refptr<AndroidVideoBuffer> Adopt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ ~AndroidVideoBuffer() override;
+
+ const ScopedJavaGlobalRef<jobject>& video_frame_buffer() const;
+
+ // Crops a region defined by `crop_x`, `crop_y`, `crop_width` and
+ // `crop_height`. Scales it to size `scale_width` x `scale_height`.
+ rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scale_width,
+ int scale_height) override;
+
+ protected:
+ // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use
+ // Create() or Adopt() instead for clarity.
+ AndroidVideoBuffer(JNIEnv* jni, const JavaRef<jobject>& j_video_frame_buffer);
+
+ private:
+ Type type() const override;
+ int width() const override;
+ int height() const override;
+
+ rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+ const int width_;
+ const int height_;
+ // Holds a VideoFrame.Buffer.
+ const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_;
+};
+
+class AndroidVideoI420Buffer : public I420BufferInterface {
+ public:
+ // Creates a native VideoFrameBuffer from a Java VideoFrame.I420Buffer.
+ static rtc::scoped_refptr<AndroidVideoI420Buffer> Create(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ // Adopts and takes ownership of the Java VideoFrame.I420Buffer. I.e.
+ // retain() will not be called, but release() will be called when the
+ // returned AndroidVideoI420Buffer is destroyed.
+ static rtc::scoped_refptr<AndroidVideoI420Buffer> Adopt(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+ protected:
+ // Should not be called directly. Adopts the buffer. Use Adopt() instead for
+ // clarity.
+ AndroidVideoI420Buffer(JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer);
+ ~AndroidVideoI420Buffer() override;
+
+ private:
+ const uint8_t* DataY() const override { return data_y_; }
+ const uint8_t* DataU() const override { return data_u_; }
+ const uint8_t* DataV() const override { return data_v_; }
+
+ int StrideY() const override { return stride_y_; }
+ int StrideU() const override { return stride_u_; }
+ int StrideV() const override { return stride_v_; }
+
+ int width() const override { return width_; }
+ int height() const override { return height_; }
+
+ const int width_;
+ const int height_;
+ // Holds a VideoFrame.I420Buffer.
+ const ScopedJavaGlobalRef<jobject> j_video_frame_buffer_;
+
+ const uint8_t* data_y_;
+ const uint8_t* data_u_;
+ const uint8_t* data_v_;
+ int stride_y_;
+ int stride_u_;
+ int stride_v_;
+};
+
+rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Create(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return AndroidVideoI420Buffer::Adopt(jni, width, height,
+ j_video_frame_buffer);
+}
+
+rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ RTC_DCHECK_EQ(
+ static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
+ Type::kI420);
+ return rtc::make_ref_counted<AndroidVideoI420Buffer>(jni, width, height,
+ j_video_frame_buffer);
+}
+
+AndroidVideoI420Buffer::AndroidVideoI420Buffer(
+ JNIEnv* jni,
+ int width,
+ int height,
+ const JavaRef<jobject>& j_video_frame_buffer)
+ : width_(width),
+ height_(height),
+ j_video_frame_buffer_(jni, j_video_frame_buffer) {
+ ScopedJavaLocalRef<jobject> j_data_y =
+ Java_I420Buffer_getDataY(jni, j_video_frame_buffer);
+ ScopedJavaLocalRef<jobject> j_data_u =
+ Java_I420Buffer_getDataU(jni, j_video_frame_buffer);
+ ScopedJavaLocalRef<jobject> j_data_v =
+ Java_I420Buffer_getDataV(jni, j_video_frame_buffer);
+
+ data_y_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y.obj()));
+ data_u_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u.obj()));
+ data_v_ =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v.obj()));
+
+ stride_y_ = Java_I420Buffer_getStrideY(jni, j_video_frame_buffer);
+ stride_u_ = Java_I420Buffer_getStrideU(jni, j_video_frame_buffer);
+ stride_v_ = Java_I420Buffer_getStrideV(jni, j_video_frame_buffer);
+}
+
+AndroidVideoI420Buffer::~AndroidVideoI420Buffer() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_Buffer_release(jni, j_video_frame_buffer_);
+}
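+// Editor's note: a sketch, not part of the patch, of the reference-counting
+// contract behind Create() and Adopt() above (width/height are
+// placeholders):
+//
+//   // Create() retains first, so the caller keeps its own Java reference:
+//   auto shared =
+//       AndroidVideoI420Buffer::Create(jni, width, height, j_buffer);
+//   // Adopt() takes over the caller's reference instead; release() runs
+//   // when the returned buffer is destroyed:
+//   auto adopted =
+//       AndroidVideoI420Buffer::Adopt(jni, width, height, j_buffer);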
+
+} // namespace
+
+int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame) {
+ return Java_VideoFrame_getTimestampNs(jni, j_video_frame);
+}
+
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ RTC_DCHECK_EQ(
+ static_cast<Type>(Java_Buffer_getBufferType(jni, j_video_frame_buffer)),
+ Type::kNative);
+ return rtc::make_ref_counted<AndroidVideoBuffer>(jni, j_video_frame_buffer);
+}
+
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return Adopt(jni, j_video_frame_buffer);
+}
+
+AndroidVideoBuffer::AndroidVideoBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer)
+ : width_(Java_Buffer_getWidth(jni, j_video_frame_buffer)),
+ height_(Java_Buffer_getHeight(jni, j_video_frame_buffer)),
+ j_video_frame_buffer_(jni, j_video_frame_buffer) {}
+
+AndroidVideoBuffer::~AndroidVideoBuffer() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ Java_Buffer_release(jni, j_video_frame_buffer_);
+}
+
+const ScopedJavaGlobalRef<jobject>& AndroidVideoBuffer::video_frame_buffer()
+ const {
+ return j_video_frame_buffer_;
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> AndroidVideoBuffer::CropAndScale(
+ int crop_x,
+ int crop_y,
+ int crop_width,
+ int crop_height,
+ int scale_width,
+ int scale_height) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ return Adopt(jni, Java_Buffer_cropAndScale(jni, j_video_frame_buffer_, crop_x,
+ crop_y, crop_width, crop_height,
+ scale_width, scale_height));
+}
+
+VideoFrameBuffer::Type AndroidVideoBuffer::type() const {
+ return Type::kNative;
+}
+
+int AndroidVideoBuffer::width() const {
+ return width_;
+}
+
+int AndroidVideoBuffer::height() const {
+ return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_i420_buffer =
+ Java_Buffer_toI420(jni, j_video_frame_buffer_);
+ // If the I420 conversion fails, propagate the nullptr.
+ if (j_i420_buffer.is_null()) {
+ return nullptr;
+ }
+
+ // We don't need to retain the buffer because toI420 returns a new object
+ // that we take ownership of.
+ return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer) {
+ VideoFrameBuffer::Type type = static_cast<VideoFrameBuffer::Type>(
+ Java_Buffer_getBufferType(jni, j_video_frame_buffer));
+ switch (type) {
+ case VideoFrameBuffer::Type::kI420: {
+ const int width = Java_Buffer_getWidth(jni, j_video_frame_buffer);
+ const int height = Java_Buffer_getHeight(jni, j_video_frame_buffer);
+ return AndroidVideoI420Buffer::Create(jni, width, height,
+ j_video_frame_buffer);
+ }
+ case VideoFrameBuffer::Type::kNative:
+ return AndroidVideoBuffer::Create(jni, j_video_frame_buffer);
+ default:
+ RTC_CHECK_NOTREACHED();
+ }
+}
+
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame,
+ uint32_t timestamp_rtp) {
+ ScopedJavaLocalRef<jobject> j_video_frame_buffer =
+ Java_VideoFrame_getBuffer(jni, j_video_frame);
+ int rotation = Java_VideoFrame_getRotation(jni, j_video_frame);
+ int64_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
+ return VideoFrame::Builder()
+ .set_video_frame_buffer(buffer)
+ .set_timestamp_rtp(timestamp_rtp)
+ .set_timestamp_ms(timestamp_ns / rtc::kNumNanosecsPerMillisec)
+ .set_rotation(static_cast<VideoRotation>(rotation))
+ .build();
+}
+
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame) {
+ rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
+
+ if (buffer->type() == VideoFrameBuffer::Type::kNative) {
+ AndroidVideoBuffer* android_buffer =
+ static_cast<AndroidVideoBuffer*>(buffer.get());
+ ScopedJavaLocalRef<jobject> j_video_frame_buffer(
+ jni, android_buffer->video_frame_buffer());
+ Java_Buffer_retain(jni, j_video_frame_buffer);
+ return Java_VideoFrame_Constructor(
+ jni, j_video_frame_buffer, static_cast<jint>(frame.rotation()),
+ static_cast<jlong>(frame.timestamp_us() *
+ rtc::kNumNanosecsPerMicrosec));
+ } else {
+ return Java_VideoFrame_Constructor(
+ jni, WrapI420Buffer(jni, buffer->ToI420()),
+ static_cast<jint>(frame.rotation()),
+ static_cast<jlong>(frame.timestamp_us() *
+ rtc::kNumNanosecsPerMicrosec));
+ }
+}
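+// Editor's note: a sketch, not part of the patch, of the timestamp unit
+// conversions used above and in JavaToNativeFrame():
+//
+//   int64_t timestamp_us = frame.timestamp_us();
+//   // Native -> Java: microseconds to nanoseconds (x1000).
+//   jlong timestamp_ns = timestamp_us * rtc::kNumNanosecsPerMicrosec;
+//   // Java -> native: nanoseconds to millisecond render time (/1000000).
+//   int64_t timestamp_ms = timestamp_ns / rtc::kNumNanosecsPerMillisec;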
+
+void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame) {
+ Java_VideoFrame_release(jni, j_video_frame);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_frame.h b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h
new file mode 100644
index 0000000000..9b916de40b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_frame.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
+
+#include <jni.h>
+
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+rtc::scoped_refptr<VideoFrameBuffer> JavaToNativeFrameBuffer(
+ JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame_buffer);
+
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame,
+ uint32_t timestamp_rtp);
+
+// NOTE: Returns a new video frame that has to be released by calling
+// ReleaseJavaVideoFrame.
+ScopedJavaLocalRef<jobject> NativeToJavaVideoFrame(JNIEnv* jni,
+ const VideoFrame& frame);
+void ReleaseJavaVideoFrame(JNIEnv* jni, const JavaRef<jobject>& j_video_frame);
+
+int64_t GetJavaVideoFrameTimestampNs(JNIEnv* jni,
+ const JavaRef<jobject>& j_video_frame);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_FRAME_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc
new file mode 100644
index 0000000000..14321084d0
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.cc
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/video_sink.h"
+
+#include "sdk/android/generated_video_jni/VideoSink_jni.h"
+#include "sdk/android/src/jni/video_frame.h"
+
+namespace webrtc {
+namespace jni {
+
+VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink)
+ : j_sink_(jni, j_sink) {}
+
+VideoSinkWrapper::~VideoSinkWrapper() {}
+
+void VideoSinkWrapper::OnFrame(const VideoFrame& frame) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
+ Java_VideoSink_onFrame(jni, j_sink_, j_frame);
+ ReleaseJavaVideoFrame(jni, j_frame);
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_sink.h b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h
new file mode 100644
index 0000000000..f16545434b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_sink.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
+#define SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
+
+#include <jni.h>
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+class VideoSinkWrapper : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ VideoSinkWrapper(JNIEnv* jni, const JavaRef<jobject>& j_sink);
+ ~VideoSinkWrapper() override;
+
+ private:
+ void OnFrame(const VideoFrame& frame) override;
+
+ const ScopedJavaGlobalRef<jobject> j_sink_;
+};
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_VIDEO_SINK_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/video_track.cc b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc
new file mode 100644
index 0000000000..70bedc12cf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/video_track.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "api/media_stream_interface.h"
+#include "sdk/android/generated_video_jni/VideoTrack_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_sink.h"
+
+namespace webrtc {
+namespace jni {
+
+static void JNI_VideoTrack_AddSink(JNIEnv* jni,
+ jlong j_native_track,
+ jlong j_native_sink) {
+ reinterpret_cast<VideoTrackInterface*>(j_native_track)
+ ->AddOrUpdateSink(
+ reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink),
+ rtc::VideoSinkWants());
+}
+
+static void JNI_VideoTrack_RemoveSink(JNIEnv* jni,
+ jlong j_native_track,
+ jlong j_native_sink) {
+ reinterpret_cast<VideoTrackInterface*>(j_native_track)
+ ->RemoveSink(reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(
+ j_native_sink));
+}
+
+static jlong JNI_VideoTrack_WrapSink(JNIEnv* jni,
+ const JavaParamRef<jobject>& sink) {
+ return jlongFromPointer(new VideoSinkWrapper(jni, sink));
+}
+
+static void JNI_VideoTrack_FreeSink(JNIEnv* jni,
+ jlong j_native_sink) {
+ delete reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink);
+}
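+// Editor's note: a sketch, not part of the patch, of the sink lifetime these
+// entry points imply. The Java side keeps the jlong returned by the wrap
+// call and hands it back for add/remove/free; in native terms:
+//
+//   jlong native_sink = jlongFromPointer(new VideoSinkWrapper(jni, j_sink));
+//   // ... AddOrUpdateSink()/RemoveSink() on the track via the entry points
+//   delete reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(
+//       native_sink);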
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc
new file mode 100644
index 0000000000..8b34495dc2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/vp8_codec.cc
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Decoder_jni.h"
+#include "sdk/android/generated_libvpx_vp8_jni/LibvpxVp8Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibvpxVp8Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(VP8Encoder::Create().release());
+}
+
+static jlong JNI_LibvpxVp8Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(VP8Decoder::Create().release());
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc
new file mode 100644
index 0000000000..ad9ca793ce
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/vp9_codec.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h"
+#include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+static jlong JNI_LibvpxVp9Encoder_CreateEncoder(JNIEnv* jni) {
+ return jlongFromPointer(VP9Encoder::Create().release());
+}
+
+static jboolean JNI_LibvpxVp9Encoder_IsSupported(JNIEnv* jni) {
+ return !SupportedVP9Codecs().empty();
+}
+
+static jlong JNI_LibvpxVp9Decoder_CreateDecoder(JNIEnv* jni) {
+ return jlongFromPointer(VP9Decoder::Create().release());
+}
+
+static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) {
+ return !SupportedVP9Codecs().empty();
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc
new file mode 100644
index 0000000000..f2c543e8c2
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+#include "sdk/android/generated_video_jni/WrappedNativeI420Buffer_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+// TODO(magjed): Write a test for this function.
+ScopedJavaLocalRef<jobject> WrapI420Buffer(
+ JNIEnv* jni,
+ const rtc::scoped_refptr<I420BufferInterface>& i420_buffer) {
+ ScopedJavaLocalRef<jobject> y_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataY()),
+ i420_buffer->StrideY() * i420_buffer->height());
+ ScopedJavaLocalRef<jobject> u_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataU()),
+ i420_buffer->StrideU() * i420_buffer->ChromaHeight());
+ ScopedJavaLocalRef<jobject> v_buffer =
+ NewDirectByteBuffer(jni, const_cast<uint8_t*>(i420_buffer->DataV()),
+ i420_buffer->StrideV() * i420_buffer->ChromaHeight());
+
+ return Java_WrappedNativeI420Buffer_Constructor(
+ jni, i420_buffer->width(), i420_buffer->height(), y_buffer,
+ i420_buffer->StrideY(), u_buffer, i420_buffer->StrideU(), v_buffer,
+ i420_buffer->StrideV(), jlongFromPointer(i420_buffer.get()));
+}
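+// Editor's note: a sketch, not part of the patch, of the I420 plane geometry
+// behind the ByteBuffer capacities above; chroma is subsampled by two in
+// both dimensions, rounding up:
+//
+//   int chroma_height = (i420_buffer->height() + 1) / 2;  // ChromaHeight()
+//   size_t y_bytes = i420_buffer->StrideY() * i420_buffer->height();
+//   size_t u_bytes = i420_buffer->StrideU() * chroma_height;
+//   size_t v_bytes = i420_buffer->StrideV() * chroma_height;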
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h
new file mode 100644
index 0000000000..70ad062cc6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
+#define SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
+
+#include <jni.h>
+
+#include "api/video/video_frame_buffer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+// This function wraps the C++ I420 buffer and returns a Java
+// VideoFrame.I420Buffer as a jobject.
+ScopedJavaLocalRef<jobject> WrapI420Buffer(
+ JNIEnv* jni,
+ const rtc::scoped_refptr<I420BufferInterface>& i420_buffer);
+
+} // namespace jni
+} // namespace webrtc
+
+#endif // SDK_ANDROID_SRC_JNI_WRAPPED_NATIVE_I420_BUFFER_H_
diff --git a/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc
new file mode 100644
index 0000000000..e812bc9527
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/src/jni/yuv_helper.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "sdk/android/generated_video_jni/YuvHelper_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+
+namespace webrtc {
+namespace jni {
+
+void JNI_YuvHelper_CopyPlane(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src,
+ jint src_stride,
+ const JavaParamRef<jobject>& j_dst,
+ jint dst_stride,
+ jint width,
+ jint height) {
+ const uint8_t* src =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t* dst =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst.obj()));
+
+ libyuv::CopyPlane(src, src_stride, dst, dst_stride, width, height);
+}
+
+void JNI_YuvHelper_I420Copy(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint width,
+ jint height) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::I420Copy(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, width, height);
+}
+
+static void JNI_YuvHelper_I420ToNV12(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_uv,
+ jint dst_stride_uv,
+ jint width,
+ jint height) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_uv =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_uv.obj()));
+
+ libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
+ width, height);
+}
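+// Editor's note: a sketch, not part of the patch, of the NV12 destination
+// layout assumed above: the Y plane is copied as-is and the chroma planes
+// are interleaved into a single half-height UV plane (U0 V0 U1 V1 ... per
+// row), so the destination chroma buffer needs:
+//
+//   int chroma_height = (height + 1) / 2;
+//   // j_dst_uv must be a direct ByteBuffer holding at least
+//   // dst_stride_uv * chroma_height bytes.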
+
+void JNI_YuvHelper_I420Rotate(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src_y,
+ jint src_stride_y,
+ const JavaParamRef<jobject>& j_src_u,
+ jint src_stride_u,
+ const JavaParamRef<jobject>& j_src_v,
+ jint src_stride_v,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint src_width,
+ jint src_height,
+ jint rotation_mode) {
+ const uint8_t* src_y =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_y.obj()));
+ const uint8_t* src_u =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_u.obj()));
+ const uint8_t* src_v =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src_v.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v,
+ src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, src_width, src_height,
+ static_cast<libyuv::RotationMode>(rotation_mode));
+}
+
+void JNI_YuvHelper_ABGRToI420(JNIEnv* jni,
+ const JavaParamRef<jobject>& j_src,
+ jint src_stride,
+ const JavaParamRef<jobject>& j_dst_y,
+ jint dst_stride_y,
+ const JavaParamRef<jobject>& j_dst_u,
+ jint dst_stride_u,
+ const JavaParamRef<jobject>& j_dst_v,
+ jint dst_stride_v,
+ jint src_width,
+ jint src_height) {
+ const uint8_t* src =
+ static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj()));
+ uint8_t* dst_y =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y.obj()));
+ uint8_t* dst_u =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u.obj()));
+ uint8_t* dst_v =
+ static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v.obj()));
+
+ libyuv::ABGRToI420(src, src_stride, dst_y, dst_stride_y, dst_u, dst_stride_u,
+ dst_v, dst_stride_v, src_width, src_height);
+}
+
+} // namespace jni
+} // namespace webrtc