author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 09:22:09 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 09:22:09 +0000
commit    43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree      620249daf56c0258faa40cbdcf9cfba06de2a846 /third_party/libwebrtc/sdk/android/native_unittests
parent    Initial commit. (diff)
Adding upstream version 110.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/sdk/android/native_unittests')
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/DEPS | 5
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc | 330
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc | 24
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc | 1161
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS | 3
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc | 57
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc | 76
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java | 20
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java | 59
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java | 31
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java | 60
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java | 25
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java | 30
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java | 33
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS | 6
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc | 115
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc | 275
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc | 23
-rw-r--r-- third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc | 175
20 files changed, 2531 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/DEPS
new file mode 100644
index 0000000000..7825103fb4
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+modules/audio_device/include/audio_device.h",
+ "+modules/audio_device/include/mock_audio_transport.h",
+ "+system_wrappers/include",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc
new file mode 100644
index 0000000000..9aec62d630
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/android_network_monitor_unittest.cc
@@ -0,0 +1,330 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/src/jni/android_network_monitor.h"
+
+#include "rtc_base/ip_address.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gtest.h"
+#include "test/scoped_key_value_config.h"
+
+namespace webrtc {
+namespace test {
+static const uint32_t kTestIpv4Address = 0xC0A80011; // 192.168.0.17
+// The following two IPv6 addresses differ only in the last 64 bits.
+static const char kTestIpv6Address1[] = "2a00:8a00:a000:1190:0000:0001:000:252";
+static const char kTestIpv6Address2[] = "2a00:8a00:a000:1190:0000:0002:000:253";
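+// Both addresses share the 64-bit prefix 2a00:8a00:a000:1190; the differing
+// trailing 64 bits correspond to the (temporary) host part of the address.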
+
+jni::NetworkInformation CreateNetworkInformation(
+ const std::string& interface_name,
+ jni::NetworkHandle network_handle,
+ const rtc::IPAddress& ip_address) {
+ jni::NetworkInformation net_info;
+ net_info.interface_name = interface_name;
+ net_info.handle = network_handle;
+ net_info.type = jni::NETWORK_WIFI;
+ net_info.ip_addresses.push_back(ip_address);
+ return net_info;
+}
+
+rtc::IPAddress GetIpAddressFromIpv6String(const std::string& str) {
+ rtc::IPAddress ipv6;
+ RTC_CHECK(rtc::IPFromString(str, &ipv6));
+ return ipv6;
+}
+
+class AndroidNetworkMonitorTest : public ::testing::Test {
+ public:
+ AndroidNetworkMonitorTest() {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(env);
+ network_monitor_ = std::make_unique<jni::AndroidNetworkMonitor>(
+ env, context, field_trials_);
+ }
+
+ void SetUp() override {
+ // Reset network monitor states.
+ network_monitor_->Stop();
+ }
+
+ void TearDown() override {
+    // The network monitor must be stopped before it is destroyed.
+ network_monitor_->Stop();
+ }
+
+ void Disconnect(jni::NetworkHandle handle) {
+ network_monitor_->OnNetworkDisconnected_n(handle);
+ }
+
+ protected:
+ test::ScopedKeyValueConfig field_trials_;
+ rtc::AutoThread main_thread_;
+ std::unique_ptr<jni::AndroidNetworkMonitor> network_monitor_;
+};
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIpv4Address) {
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, "");
+
+ ASSERT_TRUE(network_handle.has_value());
+ EXPECT_EQ(ipv4_handle, *network_handle);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingFullIpv6Address) {
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, "");
+ auto network_handle2 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address2, "");
+
+ ASSERT_TRUE(network_handle1.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle1);
+  EXPECT_FALSE(network_handle2.has_value());
+}
+
+TEST_F(AndroidNetworkMonitorTest,
+ TestFindNetworkHandleIgnoringIpv6TemporaryPart) {
+ ScopedKeyValueConfig field_trials(
+ field_trials_,
+ "WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart/Enabled/");
+ // Start() updates the states introduced by the field trial.
+ network_monitor_->Start();
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ rtc::IPAddress ipv6_address2 = GetIpAddressFromIpv6String(kTestIpv6Address2);
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address1, "");
+ auto network_handle2 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address2, "");
+
+ ASSERT_TRUE(network_handle1.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle1);
+ ASSERT_TRUE(network_handle2.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle2);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestFindNetworkHandleUsingIfName) {
+ // Start() updates the states introduced by the field trial.
+ network_monitor_->Start();
+ jni::NetworkHandle ipv6_handle = 200;
+ rtc::IPAddress ipv6_address1 = GetIpAddressFromIpv6String(kTestIpv6Address1);
+
+ // Set up an IPv6 network.
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address1);
+ std::vector<jni::NetworkInformation> net_infos(1, net_info);
+ network_monitor_->SetNetworkInfos(net_infos);
+
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+
+ // Search using ip address only...
+ auto network_handle1 =
+ network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, "");
+
+ // Search using ip address AND if_name (for typical ipv4 over ipv6 tunnel).
+ auto network_handle2 = network_monitor_->FindNetworkHandleFromAddressOrName(
+ ipv4_address, "v4-wlan0");
+
+ ASSERT_FALSE(network_handle1.has_value());
+ ASSERT_TRUE(network_handle2.has_value());
+ EXPECT_EQ(ipv6_handle, *network_handle2);
+}
+
+TEST_F(AndroidNetworkMonitorTest, TestUnderlyingVpnType) {
+ ScopedKeyValueConfig field_trials(field_trials_,
+ "WebRTC-BindUsingInterfaceName/Enabled/");
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_VPN;
+ net_info.underlying_type_for_vpn = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_EQ(
+ rtc::ADAPTER_TYPE_WIFI,
+ network_monitor_->GetInterfaceInfo("v4-wlan0").underlying_type_for_vpn);
+}
+
+// Verify that Disconnect() makes the interface unavailable.
+TEST_F(AndroidNetworkMonitorTest, Disconnect) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_TRUE(network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+
+  // Check that values are reset on Disconnect().
+ Disconnect(ipv4_handle);
+ EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_FALSE(
+ network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_UNKNOWN);
+}
+
+// Verify that Stop() resets all caches.
+TEST_F(AndroidNetworkMonitorTest, Reset) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info.type = jni::NETWORK_WIFI;
+ network_monitor_->SetNetworkInfos({net_info});
+
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_TRUE(network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+
+ // Check that values are reset on Stop().
+ network_monitor_->Stop();
+ EXPECT_FALSE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_FALSE(
+ network_monitor_
+ ->FindNetworkHandleFromAddressOrName(ipv4_address, "v4-wlan0")
+ .has_value());
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v4-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_UNKNOWN);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfname) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+  // The interface info reflects the network that was added last.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ // But both IP addresses are still searchable.
+ EXPECT_EQ(
+ *network_monitor_->FindNetworkHandleFromAddressOrName(ipv4_address, ""),
+ ipv4_handle);
+ EXPECT_EQ(
+ *network_monitor_->FindNetworkHandleFromAddressOrName(ipv6_address, ""),
+ ipv6_handle);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectOwner) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+  // The interface info reflects the network that was added last.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ Disconnect(ipv6_handle);
+
+ // We should now find ipv4_handle.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_WIFI);
+}
+
+TEST_F(AndroidNetworkMonitorTest, DuplicateIfnameDisconnectNonOwner) {
+ network_monitor_->Start();
+
+ jni::NetworkHandle ipv4_handle = 100;
+ rtc::IPAddress ipv4_address(kTestIpv4Address);
+ jni::NetworkInformation net_info1 =
+ CreateNetworkInformation("wlan0", ipv4_handle, ipv4_address);
+ net_info1.type = jni::NETWORK_WIFI;
+
+ jni::NetworkHandle ipv6_handle = 101;
+ rtc::IPAddress ipv6_address = GetIpAddressFromIpv6String(kTestIpv6Address1);
+ jni::NetworkInformation net_info2 =
+ CreateNetworkInformation("wlan0", ipv6_handle, ipv6_address);
+ net_info2.type = jni::NETWORK_UNKNOWN_CELLULAR;
+
+ network_monitor_->SetNetworkInfos({net_info1, net_info2});
+
+  // The interface info reflects the network that was added last.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+
+ Disconnect(ipv4_handle);
+
+ // We should still find ipv6 network.
+ EXPECT_TRUE(network_monitor_->GetInterfaceInfo("wlan0").available);
+ EXPECT_EQ(network_monitor_->GetInterfaceInfo("v-wlan0").adapter_type,
+ rtc::ADAPTER_TYPE_CELLULAR);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc
new file mode 100644
index 0000000000..07b3c04faf
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_unittests/application_context_provider.h"
+
+#include "sdk/android/generated_native_unittests_jni/ApplicationContextProvider_jni.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni) {
+ return ScopedJavaLocalRef<jobject>(
+ jni::Java_ApplicationContextProvider_getApplicationContextForTest(jni));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h
new file mode 100644
index 0000000000..8aace02c32
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/application_context_provider.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+#define SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace test {
+
+ScopedJavaLocalRef<jobject> GetAppContextForTest(JNIEnv* jni);
+
+} // namespace test
+} // namespace webrtc
+
+#endif // SDK_ANDROID_NATIVE_UNITTESTS_APPLICATION_CONTEXT_PROVIDER_H_
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
new file mode 100644
index 0000000000..7d582d49db
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/audio_device/audio_device_unittest.cc
@@ -0,0 +1,1161 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_device/include/audio_device.h"
+
+#include <list>
+#include <memory>
+#include <numeric>
+
+#include "api/scoped_refptr.h"
+#include "modules/audio_device/include/mock_audio_transport.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/event.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/time_utils.h"
+#include "sdk/android/generated_native_unittests_jni/BuildInfo_jni.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/audio_device/audio_common.h"
+#include "sdk/android/src/jni/audio_device/audio_device_module.h"
+#include "sdk/android/src/jni/audio_device/opensles_common.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+#include "test/testsupport/file_utils.h"
+
+using std::cout;
+using std::endl;
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Gt;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::NotNull;
+using ::testing::Return;
+
+// #define ENABLE_DEBUG_PRINTF
+#ifdef ENABLE_DEBUG_PRINTF
+#define PRINTD(...) fprintf(stderr, __VA_ARGS__);
+#else
+#define PRINTD(...) ((void)0)
+#endif
+#define PRINT(...) fprintf(stderr, __VA_ARGS__);
+
+namespace webrtc {
+
+namespace jni {
+
+// Number of callbacks (input or output) the test waits for before we set
+// an event indicating that the test was OK.
+static const size_t kNumCallbacks = 10;
+// Max amount of time we wait for an event to be set while counting callbacks.
+static const int kTestTimeOutInMilliseconds = 10 * 1000;
+// Average number of audio callbacks per second assuming 10ms packet size.
+static const size_t kNumCallbacksPerSecond = 100;
+// Play out a test file during this time (unit is in seconds).
+static const int kFilePlayTimeInSec = 5;
+static const size_t kBitsPerSample = 16;
+static const size_t kBytesPerSample = kBitsPerSample / 8;
+// Run the full-duplex test during this time (unit is in seconds).
+// Note that the first `kNumIgnoreFirstCallbacks` callbacks are ignored.
+static const int kFullDuplexTimeInSec = 5;
+// Wait for the callback sequence to stabilize by ignoring this number of
+// initial callbacks (avoids initial FIFO access).
+// Only used in the RunPlayoutAndRecordingInFullDuplex test.
+static const size_t kNumIgnoreFirstCallbacks = 50;
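+// At `kNumCallbacksPerSecond` (100) this corresponds to roughly 0.5 seconds.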
+// Sets the number of impulses per second in the latency test.
+static const int kImpulseFrequencyInHz = 1;
+// Length of round-trip latency measurements. Number of transmitted impulses
+// is kImpulseFrequencyInHz * kMeasureLatencyTimeInSec - 1.
+static const int kMeasureLatencyTimeInSec = 11;
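+// With the defaults above: 1 impulse/s * 11 s - 1 = 10 transmitted impulses.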
+// Utilized in round-trip latency measurements to avoid capturing noise samples.
+static const int kImpulseThreshold = 1000;
+static const char kTag[] = "[..........] ";
+
+enum TransportType {
+ kPlayout = 0x1,
+ kRecording = 0x2,
+};
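+// The values are bit flags, so full-duplex tests can combine them, e.g.
+// `MockAudioTransportAndroid mock(kPlayout | kRecording)`.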
+
+// Interface for processing the audio stream. Real implementations can e.g.
+// run audio in loopback, read audio from a file or perform latency
+// measurements.
+class AudioStreamInterface {
+ public:
+ virtual void Write(const void* source, size_t num_frames) = 0;
+ virtual void Read(void* destination, size_t num_frames) = 0;
+
+ protected:
+ virtual ~AudioStreamInterface() {}
+};
+
+// Reads audio samples from a PCM file, which is read into memory at
+// construction.
+class FileAudioStream : public AudioStreamInterface {
+ public:
+ FileAudioStream(size_t num_callbacks,
+ const std::string& file_name,
+ int sample_rate)
+ : file_size_in_bytes_(0), sample_rate_(sample_rate), file_pos_(0) {
+ file_size_in_bytes_ = test::GetFileSize(file_name);
+ sample_rate_ = sample_rate;
+ EXPECT_GE(file_size_in_callbacks(), num_callbacks)
+ << "Size of test file is not large enough to last during the test.";
+ const size_t num_16bit_samples =
+ test::GetFileSize(file_name) / kBytesPerSample;
+ file_.reset(new int16_t[num_16bit_samples]);
+ FILE* audio_file = fopen(file_name.c_str(), "rb");
+ EXPECT_NE(audio_file, nullptr);
+ size_t num_samples_read =
+ fread(file_.get(), sizeof(int16_t), num_16bit_samples, audio_file);
+ EXPECT_EQ(num_samples_read, num_16bit_samples);
+ fclose(audio_file);
+ }
+
+ // AudioStreamInterface::Write() is not implemented.
+ void Write(const void* source, size_t num_frames) override {}
+
+ // Read samples from file stored in memory (at construction) and copy
+ // `num_frames` (<=> 10ms) to the `destination` byte buffer.
+ void Read(void* destination, size_t num_frames) override {
+ memcpy(destination, static_cast<int16_t*>(&file_[file_pos_]),
+ num_frames * sizeof(int16_t));
+ file_pos_ += num_frames;
+ }
+
+ int file_size_in_seconds() const {
+ return static_cast<int>(file_size_in_bytes_ /
+ (kBytesPerSample * sample_rate_));
+ }
+ size_t file_size_in_callbacks() const {
+ return file_size_in_seconds() * kNumCallbacksPerSecond;
+ }
+
+ private:
+ size_t file_size_in_bytes_;
+ int sample_rate_;
+ std::unique_ptr<int16_t[]> file_;
+ size_t file_pos_;
+};
+
+// Simple first in first out (FIFO) class that wraps a list of 16-bit audio
+// buffers of fixed size and allows Write and Read operations. The idea is to
+// store recorded audio buffers (using Write) and then read (using Read) these
+// stored buffers with as short a delay as possible when the audio layer needs
+// data to play out. The number of buffers in the FIFO will stabilize under
+// normal conditions since there will be a balance between Write and Read calls.
+// The container is a std::list and access is protected with a lock since the
+// two sides (playout and recording) are each driven by their own thread.
+class FifoAudioStream : public AudioStreamInterface {
+ public:
+ explicit FifoAudioStream(size_t frames_per_buffer)
+ : frames_per_buffer_(frames_per_buffer),
+ bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)),
+ fifo_(new AudioBufferList),
+ largest_size_(0),
+ total_written_elements_(0),
+ write_count_(0) {
+ EXPECT_NE(fifo_.get(), nullptr);
+ }
+
+ ~FifoAudioStream() { Flush(); }
+
+ // Allocate new memory, copy `num_frames` samples from `source` into memory
+  // and add a pointer to the memory location to the end of the list.
+ // Increases the size of the FIFO by one element.
+ void Write(const void* source, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ PRINTD("+");
+ if (write_count_++ < kNumIgnoreFirstCallbacks) {
+ return;
+ }
+ int16_t* memory = new int16_t[frames_per_buffer_];
+ memcpy(static_cast<int16_t*>(&memory[0]), source, bytes_per_buffer_);
+ MutexLock lock(&lock_);
+ fifo_->push_back(memory);
+ const size_t size = fifo_->size();
+ if (size > largest_size_) {
+ largest_size_ = size;
+ PRINTD("(%zu)", largest_size_);
+ }
+ total_written_elements_ += size;
+ }
+
+  // Read the pointer to the data buffer at the front of the list, copy
+  // `num_frames` of stored data into `destination` and free the memory.
+ // Decreases the size of the FIFO by one element.
+ void Read(void* destination, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ PRINTD("-");
+ MutexLock lock(&lock_);
+ if (fifo_->empty()) {
+ memset(destination, 0, bytes_per_buffer_);
+ } else {
+ int16_t* memory = fifo_->front();
+ fifo_->pop_front();
+ memcpy(destination, static_cast<int16_t*>(&memory[0]), bytes_per_buffer_);
+      delete[] memory;
+ }
+ }
+
+ size_t size() const { return fifo_->size(); }
+
+ size_t largest_size() const { return largest_size_; }
+
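+  // Average number of audio buffers in the FIFO, rounded to the nearest
+  // integer. The first `kNumIgnoreFirstCallbacks` writes are excluded.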
+ size_t average_size() const {
+ return (total_written_elements_ == 0)
+ ? 0.0
+ : 0.5 + static_cast<float>(total_written_elements_) /
+ (write_count_ - kNumIgnoreFirstCallbacks);
+ }
+
+ private:
+ void Flush() {
+ for (auto it = fifo_->begin(); it != fifo_->end(); ++it) {
+      delete[] *it;
+ }
+ fifo_->clear();
+ }
+
+ using AudioBufferList = std::list<int16_t*>;
+ Mutex lock_;
+ const size_t frames_per_buffer_;
+ const size_t bytes_per_buffer_;
+ std::unique_ptr<AudioBufferList> fifo_;
+ size_t largest_size_;
+ size_t total_written_elements_;
+ size_t write_count_;
+};
+
+// Inserts periodic impulses and measures the latency between the time of
+// transmission and time of receiving the same impulse.
+// Usage requires special hardware, a so-called Audio Loopback Dongle.
+// See http://source.android.com/devices/audio/loopback.html for details.
+class LatencyMeasuringAudioStream : public AudioStreamInterface {
+ public:
+ explicit LatencyMeasuringAudioStream(size_t frames_per_buffer)
+ : frames_per_buffer_(frames_per_buffer),
+ bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)),
+ play_count_(0),
+ rec_count_(0),
+ pulse_time_(0) {}
+
+  // Insert periodic impulses in the first two samples of `destination`.
+ void Read(void* destination, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ if (play_count_ == 0) {
+ PRINT("[");
+ }
+ play_count_++;
+ memset(destination, 0, bytes_per_buffer_);
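+    // With kNumCallbacksPerSecond = 100 and kImpulseFrequencyInHz = 1, an
+    // impulse is written every 100th callback, i.e. once per second.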
+ if (play_count_ % (kNumCallbacksPerSecond / kImpulseFrequencyInHz) == 0) {
+ if (pulse_time_ == 0) {
+ pulse_time_ = rtc::TimeMillis();
+ }
+ PRINT(".");
+ const int16_t impulse = std::numeric_limits<int16_t>::max();
+ int16_t* ptr16 = static_cast<int16_t*>(destination);
+ for (size_t i = 0; i < 2; ++i) {
+ ptr16[i] = impulse;
+ }
+ }
+ }
+
+  // Detect received impulses in `source`, derive the time between
+  // transmission and detection, and add the calculated delay to the list of
+  // latencies.
+ void Write(const void* source, size_t num_frames) override {
+ ASSERT_EQ(num_frames, frames_per_buffer_);
+ rec_count_++;
+ if (pulse_time_ == 0) {
+ // Avoid detection of new impulse response until a new impulse has
+      // been transmitted (sets `pulse_time_` to a value larger than zero).
+ return;
+ }
+ const int16_t* ptr16 = static_cast<const int16_t*>(source);
+ std::vector<int16_t> vec(ptr16, ptr16 + num_frames);
+ // Find max value in the audio buffer.
+ int max = *std::max_element(vec.begin(), vec.end());
+ // Find index (element position in vector) of the max element.
+ int index_of_max =
+ std::distance(vec.begin(), std::find(vec.begin(), vec.end(), max));
+ if (max > kImpulseThreshold) {
+ PRINTD("(%d,%d)", max, index_of_max);
+ int64_t now_time = rtc::TimeMillis();
+ int extra_delay = IndexToMilliseconds(static_cast<double>(index_of_max));
+ PRINTD("[%d]", static_cast<int>(now_time - pulse_time_));
+ PRINTD("[%d]", extra_delay);
+ // Total latency is the difference between transmit time and detection
+      // time plus the extra delay within the buffer in which we detected the
+ // received impulse. It is transmitted at sample 0 but can be received
+ // at sample N where N > 0. The term `extra_delay` accounts for N and it
+ // is a value between 0 and 10ms.
+ latencies_.push_back(now_time - pulse_time_ + extra_delay);
+ pulse_time_ = 0;
+ } else {
+ PRINTD("-");
+ }
+ }
+
+ size_t num_latency_values() const { return latencies_.size(); }
+
+ int min_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return *std::min_element(latencies_.begin(), latencies_.end());
+ }
+
+ int max_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return *std::max_element(latencies_.begin(), latencies_.end());
+ }
+
+ int average_latency() const {
+ if (latencies_.empty())
+ return 0;
+ return 0.5 + static_cast<double>(
+ std::accumulate(latencies_.begin(), latencies_.end(), 0)) /
+ latencies_.size();
+ }
+
+ void PrintResults() const {
+ PRINT("] ");
+ for (auto it = latencies_.begin(); it != latencies_.end(); ++it) {
+ PRINT("%d ", *it);
+ }
+ PRINT("\n");
+ PRINT("%s[min, max, avg]=[%d, %d, %d] ms\n", kTag, min_latency(),
+ max_latency(), average_latency());
+ }
+
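+  // Each audio buffer corresponds to 10 ms, so a sample index is mapped to
+  // round(10 * index / frames_per_buffer_) milliseconds into the buffer.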
+ int IndexToMilliseconds(double index) const {
+ return static_cast<int>(10.0 * (index / frames_per_buffer_) + 0.5);
+ }
+
+ private:
+ const size_t frames_per_buffer_;
+ const size_t bytes_per_buffer_;
+ size_t play_count_;
+ size_t rec_count_;
+ int64_t pulse_time_;
+ std::vector<int> latencies_;
+};
+
+// Mocks the AudioTransport object and proxies actions for the two callbacks
+// (RecordedDataIsAvailable and NeedMorePlayData) to different implementations
+// of AudioStreamInterface.
+class MockAudioTransportAndroid : public test::MockAudioTransport {
+ public:
+ explicit MockAudioTransportAndroid(int type)
+ : num_callbacks_(0),
+ type_(type),
+ play_count_(0),
+ rec_count_(0),
+ audio_stream_(nullptr) {}
+
+ virtual ~MockAudioTransportAndroid() {}
+
+ // Set default actions of the mock object. We are delegating to fake
+ // implementations (of AudioStreamInterface) here.
+ void HandleCallbacks(rtc::Event* test_is_done,
+ AudioStreamInterface* audio_stream,
+ int num_callbacks) {
+ test_is_done_ = test_is_done;
+ audio_stream_ = audio_stream;
+ num_callbacks_ = num_callbacks;
+ if (play_mode()) {
+ ON_CALL(*this, NeedMorePlayData(_, _, _, _, _, _, _, _))
+ .WillByDefault(
+ Invoke(this, &MockAudioTransportAndroid::RealNeedMorePlayData));
+ }
+ if (rec_mode()) {
+ ON_CALL(*this, RecordedDataIsAvailable(_, _, _, _, _, _, _, _, _, _))
+ .WillByDefault(Invoke(
+ this, &MockAudioTransportAndroid::RealRecordedDataIsAvailable));
+ }
+ }
+
+ int32_t RealRecordedDataIsAvailable(const void* audioSamples,
+ const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ const uint32_t& newMicLevel) {
+ EXPECT_TRUE(rec_mode()) << "No test is expecting these callbacks.";
+ rec_count_++;
+ // Process the recorded audio stream if an AudioStreamInterface
+ // implementation exists.
+ if (audio_stream_) {
+ audio_stream_->Write(audioSamples, nSamples);
+ }
+ if (ReceivedEnoughCallbacks()) {
+ test_is_done_->Set();
+ }
+ return 0;
+ }
+
+ int32_t RealNeedMorePlayData(const size_t nSamples,
+ const size_t nBytesPerSample,
+ const size_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ size_t& nSamplesOut, // NOLINT
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {
+ EXPECT_TRUE(play_mode()) << "No test is expecting these callbacks.";
+ play_count_++;
+ nSamplesOut = nSamples;
+ // Read (possibly processed) audio stream samples to be played out if an
+ // AudioStreamInterface implementation exists.
+ if (audio_stream_) {
+ audio_stream_->Read(audioSamples, nSamples);
+ }
+ if (ReceivedEnoughCallbacks()) {
+ test_is_done_->Set();
+ }
+ return 0;
+ }
+
+ bool ReceivedEnoughCallbacks() {
+ bool recording_done = false;
+ if (rec_mode())
+ recording_done = rec_count_ >= num_callbacks_;
+ else
+ recording_done = true;
+
+ bool playout_done = false;
+ if (play_mode())
+ playout_done = play_count_ >= num_callbacks_;
+ else
+ playout_done = true;
+
+ return recording_done && playout_done;
+ }
+
+ bool play_mode() const { return type_ & kPlayout; }
+ bool rec_mode() const { return type_ & kRecording; }
+
+ private:
+ rtc::Event* test_is_done_;
+ size_t num_callbacks_;
+ int type_;
+ size_t play_count_;
+ size_t rec_count_;
+ AudioStreamInterface* audio_stream_;
+ std::unique_ptr<LatencyMeasuringAudioStream> latency_audio_stream_;
+};
+
+// AudioDeviceTest test fixture.
+class AudioDeviceTest : public ::testing::Test {
+ protected:
+ AudioDeviceTest() {
+ // One-time initialization of JVM and application context. Ensures that we
+    // can make calls between C++ and Java. Initializes both Java and OpenSL ES
+ // implementations.
+ // Creates an audio device using a default audio layer.
+ jni_ = AttachCurrentThreadIfNeeded();
+ context_ = test::GetAppContextForTest(jni_);
+ audio_device_ = CreateJavaAudioDeviceModule(jni_, context_.obj());
+ EXPECT_NE(audio_device_.get(), nullptr);
+ EXPECT_EQ(0, audio_device_->Init());
+ audio_manager_ = GetAudioManager(jni_, context_);
+ UpdateParameters();
+ }
+ virtual ~AudioDeviceTest() { EXPECT_EQ(0, audio_device_->Terminate()); }
+
+ int total_delay_ms() const { return 10; }
+
+ void UpdateParameters() {
+ int input_sample_rate = GetDefaultSampleRate(jni_, audio_manager_);
+ int output_sample_rate = GetDefaultSampleRate(jni_, audio_manager_);
+ bool stereo_playout_is_available;
+ bool stereo_record_is_available;
+ audio_device_->StereoPlayoutIsAvailable(&stereo_playout_is_available);
+ audio_device_->StereoRecordingIsAvailable(&stereo_record_is_available);
+ GetAudioParameters(jni_, context_, audio_manager_, input_sample_rate,
+ output_sample_rate, stereo_playout_is_available,
+ stereo_record_is_available, &input_parameters_,
+ &output_parameters_);
+ }
+
+ void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer) {
+ audio_device_ = CreateAudioDevice(audio_layer);
+ EXPECT_NE(audio_device_.get(), nullptr);
+ EXPECT_EQ(0, audio_device_->Init());
+ UpdateParameters();
+ }
+
+ int playout_sample_rate() const { return output_parameters_.sample_rate(); }
+ int record_sample_rate() const { return input_parameters_.sample_rate(); }
+ size_t playout_channels() const { return output_parameters_.channels(); }
+ size_t record_channels() const { return input_parameters_.channels(); }
+ size_t playout_frames_per_10ms_buffer() const {
+ return output_parameters_.frames_per_10ms_buffer();
+ }
+ size_t record_frames_per_10ms_buffer() const {
+ return input_parameters_.frames_per_10ms_buffer();
+ }
+
+ rtc::scoped_refptr<AudioDeviceModule> audio_device() const {
+ return audio_device_;
+ }
+
+ rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
+ AudioDeviceModule::AudioLayer audio_layer) {
+#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+ if (audio_layer == AudioDeviceModule::kAndroidAAudioAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateAAudioAudioDeviceModule(jni_, context_.obj()));
+ }
+#endif
+ if (audio_layer == AudioDeviceModule::kAndroidJavaAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateJavaAudioDeviceModule(jni_, context_.obj()));
+ } else if (audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateOpenSLESAudioDeviceModule(jni_, context_.obj()));
+ } else if (audio_layer ==
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio) {
+ return rtc::scoped_refptr<AudioDeviceModule>(
+ CreateJavaInputAndOpenSLESOutputAudioDeviceModule(jni_,
+ context_.obj()));
+ } else {
+ return nullptr;
+ }
+ }
+
+ // Returns file name relative to the resource root given a sample rate.
+ std::string GetFileName(int sample_rate) {
+ EXPECT_TRUE(sample_rate == 48000 || sample_rate == 44100);
+ char fname[64];
+ snprintf(fname, sizeof(fname), "audio_device/audio_short%d",
+ sample_rate / 1000);
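+    // E.g. a sample rate of 48000 yields "audio_device/audio_short48";
+    // ResourcePath() below appends the "pcm" extension.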
+ std::string file_name(webrtc::test::ResourcePath(fname, "pcm"));
+ EXPECT_TRUE(test::FileExists(file_name));
+#ifdef ENABLE_PRINTF
+ PRINT("file name: %s\n", file_name.c_str());
+ const size_t bytes = test::GetFileSize(file_name);
+ PRINT("file size: %zu [bytes]\n", bytes);
+ PRINT("file size: %zu [samples]\n", bytes / kBytesPerSample);
+ const int seconds =
+ static_cast<int>(bytes / (sample_rate * kBytesPerSample));
+ PRINT("file size: %d [secs]\n", seconds);
+ PRINT("file size: %zu [callbacks]\n", seconds * kNumCallbacksPerSecond);
+#endif
+ return file_name;
+ }
+
+ AudioDeviceModule::AudioLayer GetActiveAudioLayer() const {
+ AudioDeviceModule::AudioLayer audio_layer;
+ EXPECT_EQ(0, audio_device()->ActiveAudioLayer(&audio_layer));
+ return audio_layer;
+ }
+
+ int TestDelayOnAudioLayer(
+ const AudioDeviceModule::AudioLayer& layer_to_test) {
+ rtc::scoped_refptr<AudioDeviceModule> audio_device;
+ audio_device = CreateAudioDevice(layer_to_test);
+ EXPECT_NE(audio_device.get(), nullptr);
+ uint16_t playout_delay;
+ EXPECT_EQ(0, audio_device->PlayoutDelay(&playout_delay));
+ return playout_delay;
+ }
+
+ AudioDeviceModule::AudioLayer TestActiveAudioLayer(
+ const AudioDeviceModule::AudioLayer& layer_to_test) {
+ rtc::scoped_refptr<AudioDeviceModule> audio_device;
+ audio_device = CreateAudioDevice(layer_to_test);
+ EXPECT_NE(audio_device.get(), nullptr);
+ AudioDeviceModule::AudioLayer active;
+ EXPECT_EQ(0, audio_device->ActiveAudioLayer(&active));
+ return active;
+ }
+
+ // One way to ensure that the engine object is valid is to create an
+ // SL Engine interface since it exposes creation methods of all the OpenSL ES
+ // object types and it is only supported on the engine object. This method
+ // also verifies that the engine interface supports at least one interface.
+  // Note that the test below is not a full test of the SLEngineItf object
+ // but only a simple sanity test to check that the global engine object is OK.
+ void ValidateSLEngine(SLObjectItf engine_object) {
+ EXPECT_NE(nullptr, engine_object);
+ // Get the SL Engine interface which is exposed by the engine object.
+ SLEngineItf engine;
+ SLresult result =
+ (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine);
+ EXPECT_EQ(result, SL_RESULT_SUCCESS) << "GetInterface() on engine failed";
+ // Ensure that the SL Engine interface exposes at least one interface.
+ SLuint32 object_id = SL_OBJECTID_ENGINE;
+ SLuint32 num_supported_interfaces = 0;
+ result = (*engine)->QueryNumSupportedInterfaces(engine, object_id,
+ &num_supported_interfaces);
+ EXPECT_EQ(result, SL_RESULT_SUCCESS)
+ << "QueryNumSupportedInterfaces() failed";
+ EXPECT_GE(num_supported_interfaces, 1u);
+ }
+
+ // Volume control is currently only supported for the Java output audio layer.
+  // For OpenSL ES, the internal stream volume is always at the max level and
+  // there is no need for this test to set it to max.
+ bool AudioLayerSupportsVolumeControl() const {
+ return GetActiveAudioLayer() == AudioDeviceModule::kAndroidJavaAudio;
+ }
+
+ void SetMaxPlayoutVolume() {
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ uint32_t max_volume;
+ EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume));
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume));
+ }
+
+ void DisableBuiltInAECIfAvailable() {
+ if (audio_device()->BuiltInAECIsAvailable()) {
+ EXPECT_EQ(0, audio_device()->EnableBuiltInAEC(false));
+ }
+ }
+
+ void StartPlayout() {
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+ EXPECT_FALSE(audio_device()->Playing());
+ EXPECT_EQ(0, audio_device()->InitPlayout());
+ EXPECT_TRUE(audio_device()->PlayoutIsInitialized());
+ EXPECT_EQ(0, audio_device()->StartPlayout());
+ EXPECT_TRUE(audio_device()->Playing());
+ }
+
+ void StopPlayout() {
+ EXPECT_EQ(0, audio_device()->StopPlayout());
+ EXPECT_FALSE(audio_device()->Playing());
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+ }
+
+ void StartRecording() {
+ EXPECT_FALSE(audio_device()->RecordingIsInitialized());
+ EXPECT_FALSE(audio_device()->Recording());
+ EXPECT_EQ(0, audio_device()->InitRecording());
+ EXPECT_TRUE(audio_device()->RecordingIsInitialized());
+ EXPECT_EQ(0, audio_device()->StartRecording());
+ EXPECT_TRUE(audio_device()->Recording());
+ }
+
+ void StopRecording() {
+ EXPECT_EQ(0, audio_device()->StopRecording());
+ EXPECT_FALSE(audio_device()->Recording());
+ }
+
+ int GetMaxSpeakerVolume() const {
+ uint32_t max_volume(0);
+ EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume));
+ return max_volume;
+ }
+
+ int GetMinSpeakerVolume() const {
+ uint32_t min_volume(0);
+ EXPECT_EQ(0, audio_device()->MinSpeakerVolume(&min_volume));
+ return min_volume;
+ }
+
+ int GetSpeakerVolume() const {
+ uint32_t volume(0);
+ EXPECT_EQ(0, audio_device()->SpeakerVolume(&volume));
+ return volume;
+ }
+
+ JNIEnv* jni_;
+ ScopedJavaLocalRef<jobject> context_;
+ rtc::Event test_is_done_;
+ rtc::scoped_refptr<AudioDeviceModule> audio_device_;
+ ScopedJavaLocalRef<jobject> audio_manager_;
+ AudioParameters output_parameters_;
+ AudioParameters input_parameters_;
+};
+
+TEST_F(AudioDeviceTest, ConstructDestruct) {
+ // Using the test fixture to create and destruct the audio device module.
+}
+
+// Verify that it is possible to explicitly create the two types of supported
+// ADMs. These two tests override the default selection of the native audio
+// layer, regardless of whether the device supports low-latency output.
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForCombinedJavaOpenSLCombo) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForJavaInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidJavaAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForOpenSLInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidOpenSLESAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+// TODO(bugs.webrtc.org/8914)
+// TODO(phensman): Add test for AAudio/Java combination when this combination
+// is supported.
+#if !defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
+ DISABLED_CorrectAudioLayerIsUsedForAAudioInBothDirections
+#else
+#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \
+ CorrectAudioLayerIsUsedForAAudioInBothDirections
+#endif
+TEST_F(AudioDeviceTest,
+ MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections) {
+ AudioDeviceModule::AudioLayer expected_layer =
+ AudioDeviceModule::kAndroidAAudioAudio;
+ AudioDeviceModule::AudioLayer active_layer =
+ TestActiveAudioLayer(expected_layer);
+ EXPECT_EQ(expected_layer, active_layer);
+}
+
+// The Android ADM supports two different delay reporting modes. One for the
+// low-latency output path (in combination with OpenSL ES), and one for the
+// high-latency output path (Java backends in both directions). These two tests
+// verify that the audio device reports the correct delay estimate given the
+// selected audio layer. Note that this delay estimate will only be utilized
+// if the HW AEC is disabled.
+// Delay should be 75 ms in high latency and 25 ms in low latency.
+TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForHighLatencyOutputPath) {
+ EXPECT_EQ(75, TestDelayOnAudioLayer(AudioDeviceModule::kAndroidJavaAudio));
+}
+
+TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForLowLatencyOutputPath) {
+ EXPECT_EQ(25,
+ TestDelayOnAudioLayer(
+ AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio));
+}
+
+TEST_F(AudioDeviceTest, InitTerminate) {
+ // Initialization is part of the test fixture.
+ EXPECT_TRUE(audio_device()->Initialized());
+ EXPECT_EQ(0, audio_device()->Terminate());
+ EXPECT_FALSE(audio_device()->Initialized());
+}
+
+TEST_F(AudioDeviceTest, Devices) {
+ // Device enumeration is not supported. Verify fixed values only.
+ EXPECT_EQ(1, audio_device()->PlayoutDevices());
+ EXPECT_EQ(1, audio_device()->RecordingDevices());
+}
+
+TEST_F(AudioDeviceTest, IsAcousticEchoCancelerSupported) {
+ PRINT("%sAcoustic Echo Canceler support: %s\n", kTag,
+ audio_device()->BuiltInAECIsAvailable() ? "Yes" : "No");
+}
+
+TEST_F(AudioDeviceTest, IsNoiseSuppressorSupported) {
+ PRINT("%sNoise Suppressor support: %s\n", kTag,
+ audio_device()->BuiltInNSIsAvailable() ? "Yes" : "No");
+}
+
+// Verify that playout side is configured for mono by default.
+TEST_F(AudioDeviceTest, UsesMonoPlayoutByDefault) {
+ EXPECT_EQ(1u, output_parameters_.channels());
+}
+
+// Verify that recording side is configured for mono by default.
+TEST_F(AudioDeviceTest, UsesMonoRecordingByDefault) {
+ EXPECT_EQ(1u, input_parameters_.channels());
+}
+
+TEST_F(AudioDeviceTest, SpeakerVolumeShouldBeAvailable) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ bool available;
+ EXPECT_EQ(0, audio_device()->SpeakerVolumeIsAvailable(&available));
+ EXPECT_TRUE(available);
+}
+
+TEST_F(AudioDeviceTest, MaxSpeakerVolumeIsPositive) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ StartPlayout();
+ EXPECT_GT(GetMaxSpeakerVolume(), 0);
+ StopPlayout();
+}
+
+TEST_F(AudioDeviceTest, MinSpeakerVolumeIsZero) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ EXPECT_EQ(GetMinSpeakerVolume(), 0);
+}
+
+TEST_F(AudioDeviceTest, DefaultSpeakerVolumeIsWithinMinMax) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ const int default_volume = GetSpeakerVolume();
+ EXPECT_GE(default_volume, GetMinSpeakerVolume());
+ EXPECT_LE(default_volume, GetMaxSpeakerVolume());
+}
+
+TEST_F(AudioDeviceTest, SetSpeakerVolumeActuallySetsVolume) {
+ // The OpenSL ES output audio path does not support volume control.
+ if (!AudioLayerSupportsVolumeControl())
+ return;
+ const int default_volume = GetSpeakerVolume();
+ const int max_volume = GetMaxSpeakerVolume();
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume));
+ int new_volume = GetSpeakerVolume();
+ EXPECT_EQ(new_volume, max_volume);
+ EXPECT_EQ(0, audio_device()->SetSpeakerVolume(default_volume));
+}
+
+// Tests that playout can be initiated, started and stopped. No audio callback
+// is registered in this test.
+TEST_F(AudioDeviceTest, StartStopPlayout) {
+ StartPlayout();
+ StopPlayout();
+ StartPlayout();
+ StopPlayout();
+}
+
+// Tests that recording can be initiated, started and stopped. No audio callback
+// is registered in this test.
+TEST_F(AudioDeviceTest, StartStopRecording) {
+ StartRecording();
+ StopRecording();
+ StartRecording();
+ StopRecording();
+}
+
+// Verify that calling StopPlayout() will leave us in an uninitialized state
+// which will require a new call to InitPlayout(). This test does not call
+// StartPlayout() while being uninitialized since doing so will hit an
+// RTC_DCHECK, and death tests are not supported on Android.
+TEST_F(AudioDeviceTest, StopPlayoutRequiresInitToRestart) {
+ EXPECT_EQ(0, audio_device()->InitPlayout());
+ EXPECT_EQ(0, audio_device()->StartPlayout());
+ EXPECT_EQ(0, audio_device()->StopPlayout());
+ EXPECT_FALSE(audio_device()->PlayoutIsInitialized());
+}
+
+// Verify that calling StopRecording() will leave us in an uninitialized state
+// which will require a new call to InitRecording(). This test does not call
+// StartRecording() while being uninitialized since doing so will hit an
+// RTC_DCHECK, and death tests are not supported on Android.
+TEST_F(AudioDeviceTest, StopRecordingRequiresInitToRestart) {
+ EXPECT_EQ(0, audio_device()->InitRecording());
+ EXPECT_EQ(0, audio_device()->StartRecording());
+ EXPECT_EQ(0, audio_device()->StopRecording());
+ EXPECT_FALSE(audio_device()->RecordingIsInitialized());
+}
+
+// Start playout and verify that the native audio layer starts asking for real
+// audio samples to play out using the NeedMorePlayData callback.
+TEST_F(AudioDeviceTest, StartPlayoutVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kPlayout);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(),
+ kBytesPerSample, playout_channels(),
+ playout_sample_rate(), NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopPlayout();
+}
+
+// Start recording and verify that the native audio layer starts feeding real
+// audio samples via the RecordedDataIsAvailable callback.
+TEST_F(AudioDeviceTest, StartRecordingVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kRecording);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(
+ mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(),
+ kBytesPerSample, record_channels(),
+ record_sample_rate(), _, 0, 0, false, _, _))
+ .Times(AtLeast(kNumCallbacks));
+
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartRecording();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopRecording();
+}
+
+// Start playout and recording (full-duplex audio) and verify that audio is
+// active in both directions.
+TEST_F(AudioDeviceTest, StartPlayoutAndRecordingVerifyCallbacks) {
+ MockAudioTransportAndroid mock(kPlayout | kRecording);
+ mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks);
+ EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(),
+ kBytesPerSample, playout_channels(),
+ playout_sample_rate(), NotNull(), _, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_CALL(
+ mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(),
+ kBytesPerSample, record_channels(),
+ record_sample_rate(), _, 0, 0, false, _, _))
+ .Times(AtLeast(kNumCallbacks));
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ StartRecording();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopRecording();
+ StopPlayout();
+}
+
+// Start playout and read audio from an external PCM file when the audio layer
+// asks for data to play out. Real audio is played out in this test but it does
+// not contain any explicit verification that the audio quality is perfect.
+TEST_F(AudioDeviceTest, RunPlayoutWithFileAsSource) {
+ // TODO(henrika): extend test when mono output is supported.
+ EXPECT_EQ(1u, playout_channels());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout);
+ const int num_callbacks = kFilePlayTimeInSec * kNumCallbacksPerSecond;
+ std::string file_name = GetFileName(playout_sample_rate());
+ std::unique_ptr<FileAudioStream> file_audio_stream(
+ new FileAudioStream(num_callbacks, file_name, playout_sample_rate()));
+ mock.HandleCallbacks(&test_is_done_, file_audio_stream.get(), num_callbacks);
+ // SetMaxPlayoutVolume();
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartPlayout();
+ test_is_done_.Wait(kTestTimeOutInMilliseconds);
+ StopPlayout();
+}
+
+// It should be possible to create an OpenSL engine object if OpenSL ES based
+// audio is requested in any direction.
+TEST_F(AudioDeviceTest, TestCreateOpenSLEngine) {
+ // Verify that the global (singleton) OpenSL Engine can be acquired.
+ OpenSLEngineManager engine_manager;
+ SLObjectItf engine_object = engine_manager.GetOpenSLEngine();
+ EXPECT_NE(nullptr, engine_object);
+ // Perform a simple sanity check of the created engine object.
+ ValidateSLEngine(engine_object);
+}
+
+// The audio device module only supports the same sample rate in both directions.
+// In addition, in full-duplex low-latency mode (OpenSL ES), both input and
+// output must use the same native buffer size to allow for usage of the fast
+// audio track in Android.
+TEST_F(AudioDeviceTest, VerifyAudioParameters) {
+ EXPECT_EQ(output_parameters_.sample_rate(), input_parameters_.sample_rate());
+ SetActiveAudioLayer(AudioDeviceModule::kAndroidOpenSLESAudio);
+ EXPECT_EQ(output_parameters_.frames_per_buffer(),
+ input_parameters_.frames_per_buffer());
+}
+
+TEST_F(AudioDeviceTest, ShowAudioParameterInfo) {
+ const bool low_latency_out = false;
+ const bool low_latency_in = false;
+ PRINT("PLAYOUT:\n");
+ PRINT("%saudio layer: %s\n", kTag,
+ low_latency_out ? "Low latency OpenSL" : "Java/JNI based AudioTrack");
+ PRINT("%ssample rate: %d Hz\n", kTag, output_parameters_.sample_rate());
+ PRINT("%schannels: %zu\n", kTag, output_parameters_.channels());
+ PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag,
+ output_parameters_.frames_per_buffer(),
+ output_parameters_.GetBufferSizeInMilliseconds());
+ PRINT("RECORD: \n");
+ PRINT("%saudio layer: %s\n", kTag,
+ low_latency_in ? "Low latency OpenSL" : "Java/JNI based AudioRecord");
+ PRINT("%ssample rate: %d Hz\n", kTag, input_parameters_.sample_rate());
+ PRINT("%schannels: %zu\n", kTag, input_parameters_.channels());
+ PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag,
+ input_parameters_.frames_per_buffer(),
+ input_parameters_.GetBufferSizeInMilliseconds());
+}
+
+// Add device-specific information to the test for logging purposes.
+TEST_F(AudioDeviceTest, ShowDeviceInfo) {
+ std::string model =
+ JavaToNativeString(jni_, Java_BuildInfo_getDeviceModel(jni_));
+ std::string brand = JavaToNativeString(jni_, Java_BuildInfo_getBrand(jni_));
+ std::string manufacturer =
+ JavaToNativeString(jni_, Java_BuildInfo_getDeviceManufacturer(jni_));
+
+ PRINT("%smodel: %s\n", kTag, model.c_str());
+ PRINT("%sbrand: %s\n", kTag, brand.c_str());
+ PRINT("%smanufacturer: %s\n", kTag, manufacturer.c_str());
+}
+
+// Add Android build information to the test for logging purposes.
+TEST_F(AudioDeviceTest, ShowBuildInfo) {
+ std::string release =
+ JavaToNativeString(jni_, Java_BuildInfo_getBuildRelease(jni_));
+ std::string build_id =
+ JavaToNativeString(jni_, Java_BuildInfo_getAndroidBuildId(jni_));
+ std::string build_type =
+ JavaToNativeString(jni_, Java_BuildInfo_getBuildType(jni_));
+ int sdk = Java_BuildInfo_getSdkVersion(jni_);
+
+ PRINT("%sbuild release: %s\n", kTag, release.c_str());
+ PRINT("%sbuild id: %s\n", kTag, build_id.c_str());
+ PRINT("%sbuild type: %s\n", kTag, build_type.c_str());
+ PRINT("%sSDK version: %d\n", kTag, sdk);
+}
+
+// Basic test of the AudioParameters class using default construction where
+// all members are set to zero.
+TEST_F(AudioDeviceTest, AudioParametersWithDefaultConstruction) {
+ AudioParameters params;
+ EXPECT_FALSE(params.is_valid());
+ EXPECT_EQ(0, params.sample_rate());
+ EXPECT_EQ(0U, params.channels());
+ EXPECT_EQ(0U, params.frames_per_buffer());
+ EXPECT_EQ(0U, params.frames_per_10ms_buffer());
+ EXPECT_EQ(0U, params.GetBytesPerFrame());
+ EXPECT_EQ(0U, params.GetBytesPerBuffer());
+ EXPECT_EQ(0U, params.GetBytesPer10msBuffer());
+ EXPECT_EQ(0.0f, params.GetBufferSizeInMilliseconds());
+}
+
+// Basic test of the AudioParameters class using non default construction.
+TEST_F(AudioDeviceTest, AudioParametersWithNonDefaultConstruction) {
+ const int kSampleRate = 48000;
+ const size_t kChannels = 1;
+ const size_t kFramesPerBuffer = 480;
+ const size_t kFramesPer10msBuffer = 480;
+ const size_t kBytesPerFrame = 2;
+ const float kBufferSizeInMs = 10.0f;
+ AudioParameters params(kSampleRate, kChannels, kFramesPerBuffer);
+ EXPECT_TRUE(params.is_valid());
+ EXPECT_EQ(kSampleRate, params.sample_rate());
+ EXPECT_EQ(kChannels, params.channels());
+ EXPECT_EQ(kFramesPerBuffer, params.frames_per_buffer());
+ EXPECT_EQ(static_cast<size_t>(kSampleRate / 100),
+ params.frames_per_10ms_buffer());
+ EXPECT_EQ(kBytesPerFrame, params.GetBytesPerFrame());
+ EXPECT_EQ(kBytesPerFrame * kFramesPerBuffer, params.GetBytesPerBuffer());
+ EXPECT_EQ(kBytesPerFrame * kFramesPer10msBuffer,
+ params.GetBytesPer10msBuffer());
+ EXPECT_EQ(kBufferSizeInMs, params.GetBufferSizeInMilliseconds());
+}
+
+// Start playout and recording and store recorded data in an intermediate FIFO
+// buffer from which the playout side then reads its samples in the same order
+// as they were stored. Under ideal circumstances, a callback sequence would
+// look like: ...+-+-+-+-+-+-+-..., where '+' means 'packet recorded' and '-'
+// means 'packet played'. Under such conditions, the FIFO would only contain
+// one packet on average. However, under more realistic conditions, the size
+// of the FIFO will vary more due to an imbalance between the two sides.
+// This test tries to verify that the device maintains a balanced callback
+// sequence by running in loopback for kFullDuplexTimeInSec seconds while
+// measuring the size (max and average) of the FIFO. The size of the FIFO is
+// increased by the recording side and decreased by the playout side.
+// TODO(henrika): tune the final test parameters after running tests on several
+// different devices.
+// Disabling this test on bots since it is difficult to come up with a robust
+// test condition that works as intended on all of them. The main issue is
+// that, when swarming is used, an initial latency can build up when the two
+// sides start at different times. Hence, the test can fail even if audio
+// works as intended. The test is kept so it can be enabled manually.
+// http://bugs.webrtc.org/7744
+TEST_F(AudioDeviceTest, DISABLED_RunPlayoutAndRecordingInFullDuplex) {
+ EXPECT_EQ(record_channels(), playout_channels());
+ EXPECT_EQ(record_sample_rate(), playout_sample_rate());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout | kRecording);
+ std::unique_ptr<FifoAudioStream> fifo_audio_stream(
+ new FifoAudioStream(playout_frames_per_10ms_buffer()));
+ mock.HandleCallbacks(&test_is_done_, fifo_audio_stream.get(),
+ kFullDuplexTimeInSec * kNumCallbacksPerSecond);
+ SetMaxPlayoutVolume();
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ StartRecording();
+ StartPlayout();
+ test_is_done_.Wait(
+ std::max(kTestTimeOutInMilliseconds, 1000 * kFullDuplexTimeInSec));
+ StopPlayout();
+ StopRecording();
+
+ // These thresholds are set rather high to accommodate hardware differences
+ // across devices, so this test can be used in swarming.
+ // See http://bugs.webrtc.org/6464
+ EXPECT_LE(fifo_audio_stream->average_size(), 60u);
+ EXPECT_LE(fifo_audio_stream->largest_size(), 70u);
+}
+
+// Measures loopback latency and reports the min, max and average values for
+// a full duplex audio session.
+// The latency is measured like so:
+// - Insert impulses periodically on the output side.
+// - Detect the impulses on the input side.
+// - Measure the time difference between the transmit time and receive time.
+// - Store time differences in a vector and calculate min, max and average.
+// This test requires special hardware, called an Audio Loopback Dongle.
+// See http://source.android.com/devices/audio/loopback.html for details.
+TEST_F(AudioDeviceTest, DISABLED_MeasureLoopbackLatency) {
+ EXPECT_EQ(record_channels(), playout_channels());
+ EXPECT_EQ(record_sample_rate(), playout_sample_rate());
+ NiceMock<MockAudioTransportAndroid> mock(kPlayout | kRecording);
+ std::unique_ptr<LatencyMeasuringAudioStream> latency_audio_stream(
+ new LatencyMeasuringAudioStream(playout_frames_per_10ms_buffer()));
+ mock.HandleCallbacks(&test_is_done_, latency_audio_stream.get(),
+ kMeasureLatencyTimeInSec * kNumCallbacksPerSecond);
+ EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock));
+ SetMaxPlayoutVolume();
+ DisableBuiltInAECIfAvailable();
+ StartRecording();
+ StartPlayout();
+ test_is_done_.Wait(
+ std::max(kTestTimeOutInMilliseconds, 1000 * kMeasureLatencyTimeInSec));
+ StopPlayout();
+ StopRecording();
+ // Verify that the correct number of transmitted impulses are detected.
+ EXPECT_EQ(latency_audio_stream->num_latency_values(),
+ static_cast<size_t>(
+ kImpulseFrequencyInHz * kMeasureLatencyTimeInSec - 1));
+ latency_audio_stream->PrintResults();
+}
+
+TEST(JavaAudioDeviceTest, TestRunningTwoAdmsSimultaneously) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> context = test::GetAppContextForTest(jni);
+
+ // Create and start the first ADM.
+ rtc::scoped_refptr<AudioDeviceModule> adm_1 =
+ CreateJavaAudioDeviceModule(jni, context.obj());
+ EXPECT_EQ(0, adm_1->Init());
+ EXPECT_EQ(0, adm_1->InitRecording());
+ EXPECT_EQ(0, adm_1->StartRecording());
+
+ // Create and start a second ADM. Expect this to fail due to the microphone
+ // already being in use.
+ rtc::scoped_refptr<AudioDeviceModule> adm_2 =
+ CreateJavaAudioDeviceModule(jni, context.obj());
+ int32_t err = adm_2->Init();
+ err |= adm_2->InitRecording();
+ err |= adm_2->StartRecording();
+ EXPECT_NE(0, err);
+
+ // Stop and terminate second adm.
+ adm_2->StopRecording();
+ adm_2->Terminate();
+
+ // Stop first ADM.
+ EXPECT_EQ(0, adm_1->StopRecording());
+ EXPECT_EQ(0, adm_1->Terminate());
+}
+
+} // namespace jni
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS
new file mode 100644
index 0000000000..fb2c30fab1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/codecs/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+media/base/media_constants.h",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc
new file mode 100644
index 0000000000..c858095d05
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/codecs/wrapper_unittest.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "absl/memory/memory.h"
+#include "media/base/media_constants.h"
+#include "sdk/android/generated_native_unittests_jni/CodecsWrapperTestHelper_jni.h"
+#include "sdk/android/native_api/codecs/wrapper.h"
+#include "sdk/android/src/jni/video_encoder_wrapper.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+TEST(JavaCodecsWrapperTest, JavaToNativeVideoCodecInfo) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_video_codec_info =
+ jni::Java_CodecsWrapperTestHelper_createTestVideoCodecInfo(env);
+
+ const SdpVideoFormat video_format =
+ JavaToNativeVideoCodecInfo(env, j_video_codec_info.obj());
+
+ EXPECT_EQ(cricket::kH264CodecName, video_format.name);
+ const auto it =
+ video_format.parameters.find(cricket::kH264FmtpProfileLevelId);
+ ASSERT_NE(it, video_format.parameters.end());
+ EXPECT_EQ(cricket::kH264ProfileLevelConstrainedBaseline, it->second);
+}
+
+TEST(JavaCodecsWrapperTest, JavaToNativeResolutionBitrateLimits) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_fake_encoder =
+ jni::Java_CodecsWrapperTestHelper_createFakeVideoEncoder(env);
+
+ auto encoder = jni::JavaToNativeVideoEncoder(env, j_fake_encoder);
+ ASSERT_TRUE(encoder);
+
+ // Check that the bitrate limits are correctly passed from Java to native.
+ const std::vector<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
+ encoder->GetEncoderInfo().resolution_bitrate_limits;
+ ASSERT_EQ(bitrate_limits.size(), 1u);
+ EXPECT_EQ(bitrate_limits[0].frame_size_pixels, 640 * 360);
+ EXPECT_EQ(bitrate_limits[0].min_start_bitrate_bps, 300000);
+ EXPECT_EQ(bitrate_limits[0].min_bitrate_bps, 200000);
+ EXPECT_EQ(bitrate_limits[0].max_bitrate_bps, 1000000);
+}
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc
new file mode 100644
index 0000000000..4e7a6ed7ca
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/java_types_unittest.cc
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "sdk/android/generated_native_unittests_jni/JavaTypesTestHelper_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+TEST(JavaTypesTest, TestJavaToNativeStringMap) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ ScopedJavaLocalRef<jobject> j_map =
+ jni::Java_JavaTypesTestHelper_createTestStringMap(env);
+
+ std::map<std::string, std::string> output = JavaToNativeStringMap(env, j_map);
+
+ std::map<std::string, std::string> expected{
+ {"one", "1"}, {"two", "2"}, {"three", "3"},
+ };
+ EXPECT_EQ(expected, output);
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeIntArray) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int32_t> test_data{1, 20, 300};
+
+ ScopedJavaLocalRef<jintArray> array = NativeToJavaIntArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeIntArray(env, array));
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeByteArray) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int8_t> test_data{1, 20, 30};
+
+ ScopedJavaLocalRef<jbyteArray> array = NativeToJavaByteArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeByteArray(env, array));
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeIntArrayLeakTest) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int32_t> test_data{1, 20, 300};
+
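+ // A leak would typically exhaust the JNI local reference table (commonly
+ // capped at 512 entries on Android) well before 2000 iterations;
+ // ScopedJavaLocalRef releases each local reference as it goes out of scope.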
+ for (int i = 0; i < 2000; i++) {
+ ScopedJavaLocalRef<jintArray> array = NativeToJavaIntArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeIntArray(env, array));
+ }
+}
+
+TEST(JavaTypesTest, TestNativeToJavaToNativeByteArrayLeakTest) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+
+ std::vector<int8_t> test_data{1, 20, 30};
+
+ for (int i = 0; i < 2000; i++) {
+ ScopedJavaLocalRef<jbyteArray> array =
+ NativeToJavaByteArray(env, test_data);
+ EXPECT_EQ(test_data, JavaToNativeByteArray(env, array));
+ }
+}
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java
new file mode 100644
index 0000000000..e10d34710d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/ApplicationContextProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+public class ApplicationContextProvider {
+ @CalledByNative
+ public static Context getApplicationContextForTest() {
+ return ContextUtils.getApplicationContext();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java
new file mode 100644
index 0000000000..0440ae4209
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/BuildInfo.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Build;
+import org.webrtc.CalledByNative;
+
+public final class BuildInfo {
+ public static String getDevice() {
+ return Build.DEVICE;
+ }
+
+ @CalledByNative
+ public static String getDeviceModel() {
+ return Build.MODEL;
+ }
+
+ public static String getProduct() {
+ return Build.PRODUCT;
+ }
+
+ @CalledByNative
+ public static String getBrand() {
+ return Build.BRAND;
+ }
+
+ @CalledByNative
+ public static String getDeviceManufacturer() {
+ return Build.MANUFACTURER;
+ }
+
+ @CalledByNative
+ public static String getAndroidBuildId() {
+ return Build.ID;
+ }
+
+ @CalledByNative
+ public static String getBuildType() {
+ return Build.TYPE;
+ }
+
+ @CalledByNative
+ public static String getBuildRelease() {
+ return Build.VERSION.RELEASE;
+ }
+
+ @CalledByNative
+ public static int getSdkVersion() {
+ return Build.VERSION.SDK_INT;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java
new file mode 100644
index 0000000000..70151d3b78
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class CodecsWrapperTestHelper {
+ @CalledByNative
+ public static VideoCodecInfo createTestVideoCodecInfo() {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(
+ VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
+
+ VideoCodecInfo codec_info = new VideoCodecInfo("H264", params);
+ return codec_info;
+ }
+
+ @CalledByNative
+ public static VideoEncoder createFakeVideoEncoder() {
+ return new FakeVideoEncoder();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java
new file mode 100644
index 0000000000..513f145518
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/FakeVideoEncoder.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.VideoEncoder;
+
+/**
+ * An implementation of VideoEncoder that is used to test the functionality of
+ * VideoEncoderWrapper.
+ */
+class FakeVideoEncoder implements VideoEncoder {
+ @Override
+ public VideoCodecStatus initEncode(Settings settings, Callback encodeCallback) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus release() {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus encode(VideoFrame frame, EncodeInfo info) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate) {
+ return VideoCodecStatus.OK;
+ }
+
+ @Override
+ public ScalingSettings getScalingSettings() {
+ return ScalingSettings.OFF;
+ }
+
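+ // These values are the ones asserted on the native side by
+ // JavaCodecsWrapperTest.JavaToNativeResolutionBitrateLimits.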
+ @Override
+ public ResolutionBitrateLimits[] getResolutionBitrateLimits() {
+ ResolutionBitrateLimits[] resolution_bitrate_limits = {
+ new ResolutionBitrateLimits(/* frameSizePixels = */ 640 * 360,
+ /* minStartBitrateBps = */ 300000,
+ /* minBitrateBps = */ 200000,
+ /* maxBitrateBps = */ 1000000)};
+
+ return resolution_bitrate_limits;
+ }
+
+ @Override
+ public String getImplementationName() {
+ return "FakeVideoEncoder";
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java
new file mode 100644
index 0000000000..6695ef79af
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaTypesTestHelper.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class JavaTypesTestHelper {
+ @CalledByNative
+ public static Map<String, String> createTestStringMap() {
+ Map<String, String> testMap = new HashMap<String, String>();
+ testMap.put("one", "1");
+ testMap.put("two", "2");
+ testMap.put("three", "3");
+ return testMap;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java
new file mode 100644
index 0000000000..2803acb450
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/JavaVideoSourceTestHelper.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class JavaVideoSourceTestHelper {
+ @CalledByNative
+ public static void startCapture(CapturerObserver observer, boolean success) {
+ observer.onCapturerStarted(success);
+ }
+
+ @CalledByNative
+ public static void stopCapture(CapturerObserver observer) {
+ observer.onCapturerStopped();
+ }
+
+ @CalledByNative
+ public static void deliverFrame(
+ int width, int height, int rotation, long timestampNs, CapturerObserver observer) {
+ observer.onFrameCaptured(
+ new VideoFrame(JavaI420Buffer.allocate(width, height), rotation, timestampNs));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java
new file mode 100644
index 0000000000..445a6733ea
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import org.webrtc.PeerConnectionFactory.InitializationOptions;
+
+public class PeerConnectionFactoryInitializationHelper {
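+ // In these native unit tests the WebRTC code is already linked into the
+ // test binary, so the loader can simply report success without loading
+ // anything.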
+ private static class MockLoader implements NativeLibraryLoader {
+ @Override
+ public boolean load(String name) {
+ return true;
+ }
+ }
+
+ @CalledByNative
+ public static void initializeFactoryForTests() {
+ Context ctx = ContextUtils.getApplicationContext();
+ InitializationOptions options = InitializationOptions.builder(ctx)
+ .setNativeLibraryLoader(new MockLoader())
+ .createInitializationOptions();
+
+ PeerConnectionFactory.initialize(options);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS
new file mode 100644
index 0000000000..ed77eb5d6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/DEPS
@@ -0,0 +1,6 @@
+include_rules = [
+ "+logging/rtc_event_log/rtc_event_log_factory.h",
+ "+media/base",
+ "+media/engine",
+ "+modules/audio_processing/include/audio_processing.h",
+]
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
new file mode 100644
index 0000000000..8bb6e33e65
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/peerconnection/peer_connection_factory_unittest.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "sdk/android/native_api/peerconnection/peer_connection_factory.h"
+
+#include <memory>
+
+#include "api/rtc_event_log/rtc_event_log_factory.h"
+#include "api/task_queue/default_task_queue_factory.h"
+#include "media/base/media_engine.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/webrtc_media_engine.h"
+#include "media/engine/webrtc_media_engine_defaults.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/physical_socket_server.h"
+#include "rtc_base/thread.h"
+#include "sdk/android/generated_native_unittests_jni/PeerConnectionFactoryInitializationHelper_jni.h"
+#include "sdk/android/native_api/audio_device_module/audio_device_android.h"
+#include "sdk/android/native_api/jni/jvm.h"
+#include "sdk/android/native_unittests/application_context_provider.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+namespace {
+
+// Creates a native PeerConnectionFactory that will be wrapped by the Java one.
+rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> CreateTestPCF(
+ JNIEnv* jni,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
+ // think about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ PeerConnectionFactoryDependencies pcf_deps;
+ pcf_deps.network_thread = network_thread;
+ pcf_deps.worker_thread = worker_thread;
+ pcf_deps.signaling_thread = signaling_thread;
+ pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory();
+ pcf_deps.call_factory = CreateCallFactory();
+ pcf_deps.event_log_factory =
+ std::make_unique<RtcEventLogFactory>(pcf_deps.task_queue_factory.get());
+
+ cricket::MediaEngineDependencies media_deps;
+ media_deps.task_queue_factory = pcf_deps.task_queue_factory.get();
+ media_deps.adm =
+ CreateJavaAudioDeviceModule(jni, GetAppContextForTest(jni).obj());
+ media_deps.video_encoder_factory =
+ std::make_unique<webrtc::InternalEncoderFactory>();
+ media_deps.video_decoder_factory =
+ std::make_unique<webrtc::InternalDecoderFactory>();
+ SetMediaEngineDefaults(&media_deps);
+ pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
+ RTC_LOG(LS_INFO) << "Media engine created: " << pcf_deps.media_engine.get();
+
+ auto factory = CreateModularPeerConnectionFactory(std::move(pcf_deps));
+ RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << factory.get();
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ "WebRTC/libjingle init likely failed on this device";
+
+ return factory;
+}
+
+TEST(PeerConnectionFactoryTest, NativeToJavaPeerConnectionFactory) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+
+ RTC_LOG(LS_INFO) << "Initializing java peer connection factory.";
+ jni::Java_PeerConnectionFactoryInitializationHelper_initializeFactoryForTests(
+ jni);
+ RTC_LOG(LS_INFO) << "Java peer connection factory initialized.";
+
+ auto socket_server = std::make_unique<rtc::PhysicalSocketServer>();
+
+ // Create threads.
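+ // Only the network thread needs a physical socket server for real network
+ // I/O; the other threads use rtc::Thread::Create(), which supplies a
+ // default socket server.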
+ auto network_thread = std::make_unique<rtc::Thread>(socket_server.get());
+ network_thread->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
+ worker_thread->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
+ signaling_thread->SetName("signaling_thread", nullptr);
+ RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
+ CreateTestPCF(jni, network_thread.get(), worker_thread.get(),
+ signaling_thread.get());
+
+ jobject java_factory = NativeToJavaPeerConnectionFactory(
+ jni, factory, std::move(socket_server), std::move(network_thread),
+ std::move(worker_thread), std::move(signaling_thread));
+
+ RTC_LOG(LS_INFO) << java_factory;
+
+ EXPECT_NE(java_factory, nullptr);
+}
+
+} // namespace
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
new file mode 100644
index 0000000000..5cbd4aafe1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/stacktrace/stacktrace_unittest.cc
@@ -0,0 +1,275 @@
+/*
+ * Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sdk/android/native_api/stacktrace/stacktrace.h"
+
+#include <dlfcn.h>
+
+#include <atomic>
+#include <memory>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "rtc_base/event.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/platform_thread.h"
+#include "rtc_base/string_utils.h"
+#include "rtc_base/strings/string_builder.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/inline.h"
+#include "system_wrappers/include/sleep.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+
+// A simple atomic spin event. Implemented with std::atomic_flag, since the C++
+// standard guarantees that that type is implemented with actual atomic
+// instructions (as opposed to e.g. with a mutex). Uses sequentially consistent
+// memory order since this is a test, where simplicity trumps performance.
+class SimpleSpinEvent {
+ public:
+ // Initialize the event to its blocked state.
+ SimpleSpinEvent() {
+ static_cast<void>(blocked_.test_and_set(std::memory_order_seq_cst));
+ }
+
+ // Busy-wait for the event to become unblocked, and block it behind us as we
+ // leave.
+ void Wait() {
+ bool was_blocked;
+ do {
+ // Check if the event was blocked, and set it to blocked.
+ was_blocked = blocked_.test_and_set(std::memory_order_seq_cst);
+ } while (was_blocked);
+ }
+
+ // Unblock the event.
+ void Set() { blocked_.clear(std::memory_order_seq_cst); }
+
+ private:
+ std::atomic_flag blocked_;
+};
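+
+// Usage sketch: the spawned thread calls Set() once it has entered the
+// region of interest, and the observing thread spins in Wait() until then
+// (see TestStacktrace() below).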
+
+// Returns the execution address relative to the .so base address. This matches
+// the addresses we get from GetStacktrace().
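+// (dladdr() reports the module's load address in dli_fbase; subtracting it
+// removes the effect of address space layout randomization.)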
+RTC_NO_INLINE uint32_t GetCurrentRelativeExecutionAddress() {
+ void* pc = __builtin_return_address(0);
+ Dl_info dl_info = {};
+ const bool success = dladdr(pc, &dl_info);
+ EXPECT_TRUE(success);
+ return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pc) -
+ reinterpret_cast<uintptr_t>(dl_info.dli_fbase));
+}
+
+// Returns true if any of the stack trace elements is inside the specified
+// region.
+bool StackTraceContainsRange(const std::vector<StackTraceElement>& stack_trace,
+ uintptr_t pc_low,
+ uintptr_t pc_high) {
+ for (const StackTraceElement& stack_trace_element : stack_trace) {
+ if (pc_low <= stack_trace_element.relative_address &&
+ pc_high >= stack_trace_element.relative_address) {
+ return true;
+ }
+ }
+ return false;
+}
+
+class DeadlockInterface {
+ public:
+ virtual ~DeadlockInterface() {}
+
+ // This function should deadlock until Release() is called.
+ virtual void Deadlock() = 0;
+
+ // This function should release the thread stuck in Deadlock().
+ virtual void Release() = 0;
+};
+
+struct ThreadParams {
+ volatile int tid;
+ // Signaled when the deadlock region is entered.
+ SimpleSpinEvent deadlock_start_event;
+ DeadlockInterface* volatile deadlock_impl;
+ // Defines the address range within which the deadlock will occur.
+ volatile uint32_t deadlock_region_start_address;
+ volatile uint32_t deadlock_region_end_address;
+ // Signaled when the deadlock is done.
+ rtc::Event deadlock_done_event;
+};
+
+class RtcEventDeadlock : public DeadlockInterface {
+ private:
+ void Deadlock() override { event.Wait(rtc::Event::kForever); }
+ void Release() override { event.Set(); }
+
+ rtc::Event event;
+};
+
+class RtcCriticalSectionDeadlock : public DeadlockInterface {
+ public:
+ RtcCriticalSectionDeadlock()
+ : mutex_lock_(std::make_unique<MutexLock>(&mutex_)) {}
+
+ private:
+ void Deadlock() override { MutexLock lock(&mutex_); }
+
+ void Release() override { mutex_lock_.reset(); }
+
+ Mutex mutex_;
+ std::unique_ptr<MutexLock> mutex_lock_;
+};
+
+class SpinDeadlock : public DeadlockInterface {
+ public:
+ SpinDeadlock() : is_deadlocked_(true) {}
+
+ private:
+ void Deadlock() override {
+ while (is_deadlocked_) {
+ }
+ }
+
+ void Release() override { is_deadlocked_ = false; }
+
+ std::atomic<bool> is_deadlocked_;
+};
+
+class SleepDeadlock : public DeadlockInterface {
+ private:
+ void Deadlock() override { sleep(1000000); }
+
+ void Release() override {
+ // The interrupt itself will break free from the sleep.
+ }
+};
+
+void TestStacktrace(std::unique_ptr<DeadlockInterface> deadlock_impl) {
+ // Set params that will be sent to other thread.
+ ThreadParams params;
+ params.deadlock_impl = deadlock_impl.get();
+
+ // Spawn thread.
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&params] {
+ params.tid = gettid();
+ params.deadlock_region_start_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_start_event.Set();
+ params.deadlock_impl->Deadlock();
+ params.deadlock_region_end_address =
+ GetCurrentRelativeExecutionAddress();
+ params.deadlock_done_event.Set();
+ },
+ "StacktraceTest");
+
+ // Wait until the thread has entered the deadlock region, and take a very
+ // brief nap to give it time to reach the actual deadlock.
+ params.deadlock_start_event.Wait();
+ SleepMs(1);
+
+ // Acquire the stack trace of the thread which should now be deadlocking.
+ std::vector<StackTraceElement> stack_trace = GetStackTrace(params.tid);
+
+ // Release the deadlock so that the thread can continue.
+ deadlock_impl->Release();
+
+ // Wait until the thread has left the deadlock.
+ params.deadlock_done_event.Wait(rtc::Event::kForever);
+
+ // Assert that the stack trace contains the deadlock region.
+ EXPECT_TRUE(StackTraceContainsRange(stack_trace,
+ params.deadlock_region_start_address,
+ params.deadlock_region_end_address))
+ << "Deadlock region: ["
+ << rtc::ToHex(params.deadlock_region_start_address) << ", "
+ << rtc::ToHex(params.deadlock_region_end_address)
+ << "] not contained in: " << StackTraceToString(stack_trace);
+}
+
+class LookoutLogSink final : public rtc::LogSink {
+ public:
+ explicit LookoutLogSink(std::string look_for)
+ : look_for_(std::move(look_for)) {}
+ void OnLogMessage(const std::string& message) override {
+ OnLogMessage(absl::string_view(message));
+ }
+ void OnLogMessage(absl::string_view message) override {
+ if (message.find(look_for_) != std::string::npos) {
+ when_found_.Set();
+ }
+ }
+ rtc::Event& WhenFound() { return when_found_; }
+
+ private:
+ const std::string look_for_;
+ rtc::Event when_found_;
+};
+
+} // namespace
+
+TEST(Stacktrace, TestCurrentThread) {
+ const uint32_t start_addr = GetCurrentRelativeExecutionAddress();
+ const std::vector<StackTraceElement> stack_trace = GetStackTrace();
+ const uint32_t end_addr = GetCurrentRelativeExecutionAddress();
+ EXPECT_TRUE(StackTraceContainsRange(stack_trace, start_addr, end_addr))
+ << "Caller region: [" << rtc::ToHex(start_addr) << ", "
+ << rtc::ToHex(end_addr)
+ << "] not contained in: " << StackTraceToString(stack_trace);
+}
+
+TEST(Stacktrace, TestSpinLock) {
+ TestStacktrace(std::make_unique<SpinDeadlock>());
+}
+
+TEST(Stacktrace, TestSleep) {
+ TestStacktrace(std::make_unique<SleepDeadlock>());
+}
+
+// Stack traces originating from kernel space do not include user space stack
+// frames on 32-bit ARM.
+#ifdef WEBRTC_ARCH_ARM64
+
+TEST(Stacktrace, TestRtcEvent) {
+ TestStacktrace(std::make_unique<RtcEventDeadlock>());
+}
+
+TEST(Stacktrace, TestRtcCriticalSection) {
+ TestStacktrace(std::make_unique<RtcCriticalSectionDeadlock>());
+}
+
+#endif
+
+TEST(Stacktrace, TestRtcEventDeadlockDetection) {
+ // Start looking for the expected log output.
+ LookoutLogSink sink(/*look_for=*/"Probable deadlock");
+ rtc::LogMessage::AddLogToStream(&sink, rtc::LS_WARNING);
+
+ // Start a thread that waits for an event.
+ rtc::Event ev;
+ auto thread = rtc::PlatformThread::SpawnJoinable(
+ [&ev] { ev.Wait(rtc::Event::kForever); },
+ "TestRtcEventDeadlockDetection");
+
+ // The message should appear after 3 sec. We'll wait up to 10 sec in an
+ // attempt to not be flaky.
+ EXPECT_TRUE(sink.WhenFound().Wait(10000));
+
+ // Unblock the thread and shut it down.
+ ev.Set();
+ thread.Finalize();
+ rtc::LogMessage::RemoveLogToStream(&sink);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc b/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc
new file mode 100644
index 0000000000..dafe49c474
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/test_jni_onload.cc
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
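+// JNIEXPORT is redefined below so that JNI_OnLoad is exported even if the
+// build hides symbols by default.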
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "rtc_base/checks.h"
+#include "sdk/android/native_api/base/init.h"
+#include "sdk/android/native_api/jni/java_types.h"
+
+// This is called by the VM when the shared library is first loaded.
+JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ webrtc::InitAndroid(vm);
+ return JNI_VERSION_1_4;
+}
diff --git a/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc b/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc
new file mode 100644
index 0000000000..3c4eed1fc3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/native_unittests/video/video_source_unittest.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "api/video/video_sink_interface.h"
+#include "sdk/android/generated_native_unittests_jni/JavaVideoSourceTestHelper_jni.h"
+#include "sdk/android/native_api/video/video_source.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+class TestVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ void OnFrame(const VideoFrame& frame) { frames_.push_back(frame); }
+
+ std::vector<VideoFrame> GetFrames() {
+ std::vector<VideoFrame> temp = frames_;
+ frames_.clear();
+ return temp;
+ }
+
+ private:
+ std::vector<VideoFrame> frames_;
+};
+} // namespace
+
+TEST(JavaVideoSourceTest, CreateJavaVideoSource) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ ASSERT_NE(nullptr, video_track_source);
+ EXPECT_NE(nullptr,
+ video_track_source->GetJavaVideoCapturerObserver(env).obj());
+}
+
+TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
+ TestVideoSink test_video_sink;
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+ video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ const int width = 20;
+ const int height = 32;
+ const int rotation = 180;
+ const int64_t timestamp = 987654321;
+ jni::Java_JavaVideoSourceTestHelper_deliverFrame(
+ env, width, height, rotation, timestamp,
+ video_track_source->GetJavaVideoCapturerObserver(env));
+
+ std::vector<VideoFrame> frames = test_video_sink.GetFrames();
+ ASSERT_EQ(1u, frames.size());
+ webrtc::VideoFrame frame = frames[0];
+ EXPECT_EQ(width, frame.width());
+ EXPECT_EQ(height, frame.height());
+ EXPECT_EQ(rotation, frame.rotation());
+}
+
+TEST(JavaVideoSourceTest,
+ OnFrameCapturedFrameIsDeliveredToSinkWithPreservedTimestamp) {
+ TestVideoSink test_video_sink;
+
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, false /* align_timestamps */);
+ video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ const int width = 20;
+ const int height = 32;
+ const int rotation = 180;
+ const int64_t timestamp = 987654321;
+ jni::Java_JavaVideoSourceTestHelper_deliverFrame(
+ env, width, height, rotation, timestamp,
+ video_track_source->GetJavaVideoCapturerObserver(env));
+
+ std::vector<VideoFrame> frames = test_video_sink.GetFrames();
+ ASSERT_EQ(1u, frames.size());
+ webrtc::VideoFrame frame = frames[0];
+ EXPECT_EQ(width, frame.width());
+ EXPECT_EQ(height, frame.height());
+ EXPECT_EQ(rotation, frame.rotation());
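+ // With align_timestamps disabled, the capture timestamp is passed through
+ // unmodified, only converted from nanoseconds to microseconds.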
+ EXPECT_EQ(timestamp / 1000, frame.timestamp_us());
+}
+
+TEST(JavaVideoSourceTest, CapturerStartedSuccessStateBecomesLive) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kLive,
+ video_track_source->state());
+}
+
+TEST(JavaVideoSourceTest, CapturerStartedFailureStateBecomesEnded) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ false /* success */);
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
+ video_track_source->state());
+}
+
+TEST(JavaVideoSourceTest, CapturerStoppedStateBecomesEnded) {
+ JNIEnv* env = AttachCurrentThreadIfNeeded();
+ // Wrap test thread so it can be used as the signaling thread.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+
+ rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
+ CreateJavaVideoSource(
+ env, rtc::ThreadManager::Instance()->CurrentThread(),
+ false /* is_screencast */, true /* align_timestamps */);
+
+ jni::Java_JavaVideoSourceTestHelper_startCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env),
+ true /* success */);
+ jni::Java_JavaVideoSourceTestHelper_stopCapture(
+ env, video_track_source->GetJavaVideoCapturerObserver(env));
+
+ EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
+ video_track_source->state());
+}
+
+} // namespace test
+} // namespace webrtc