Diffstat (limited to 'third_party/libwebrtc/sdk/android/instrumentationtests')
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml | 38
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties | 18
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/build.xml | 92
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc | 31
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/project.properties | 16
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java | 200
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java | 54
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java | 205
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java | 208
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java | 334
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java | 793
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java | 109
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java | 366
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java | 129
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java | 318
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java | 507
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java | 161
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java | 411
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java | 1641
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java | 65
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java | 215
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java | 150
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java | 70
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java | 77
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java | 67
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java | 518
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java | 241
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java | 15
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java | 43
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java | 88
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java | 530
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java | 112
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java | 31
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java | 207
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m | 5
-rw-r--r--  third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc | 45
36 files changed, 8110 insertions, 0 deletions
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml b/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml
new file mode 100644
index 0000000000..55028da703
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/AndroidManifest.xml
@@ -0,0 +1,38 @@
+<!--
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+-->
+
+<manifest
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:tools="http://schemas.android.com/tools"
+ package="org.webrtc">
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+ <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+ <uses-sdk android:minSdkVersion="21" android:targetSdkVersion="21" />
+
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.INTERNET" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
+ <uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
+
+ <application>
+ <uses-library android:name="android.test.runner" />
+ </application>
+
+ <instrumentation android:name="org.chromium.base.test.BaseChromiumAndroidJUnitRunner"
+ tools:ignore="MissingPrefix"
+ android:targetPackage="org.webrtc"
+ android:label="Tests for WebRTC Android SDK"/>
+</manifest>
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties b/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties
new file mode 100644
index 0000000000..bc05353865
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/ant.properties
@@ -0,0 +1,18 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+# 'source.dir' for the location of your java source folder and
+# 'out.dir' for the location of your output folder.
+
+# You can also use it to define how the release builds are signed by declaring
+# the following properties:
+# 'key.store' for the location of your keystore and
+# 'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
+source.dir=../java/testcommon/src;src
\ No newline at end of file
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml b/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml
new file mode 100644
index 0000000000..cb4cb7ac94
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="libjingle_peerconnection_android_unittest" default="help">
+
+ <!-- The local.properties file is created and updated by the 'android' tool.
+ It contains the path to the SDK. It should *NOT* be checked into
+ Version Control Systems. -->
+ <property file="local.properties" />
+
+ <!-- The ant.properties file can be created by you. It is only edited by the
+ 'android' tool to add properties to it.
+ This is the place to change some Ant specific build properties.
+ Here are some properties you may want to change/update:
+
+ source.dir
+ The name of the source directory. Default is 'src'.
+ out.dir
+ The name of the output directory. Default is 'bin'.
+
+ For other overridable properties, look at the beginning of the rules
+ files in the SDK, at tools/ant/build.xml
+
+ Properties related to the SDK location or the project target should
+ be updated using the 'android' tool with the 'update' action.
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems.
+
+ -->
+ <property file="ant.properties" />
+
+    <!-- if sdk.dir was not set from one of the property files, then
+         get it from the ANDROID_SDK_ROOT env var.
+ This must be done before we load project.properties since
+ the proguard config can use sdk.dir -->
+ <property environment="env" />
+ <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+ <isset property="env.ANDROID_SDK_ROOT" />
+ </condition>
+
+ <!-- The project.properties file is created and updated by the 'android'
+ tool, as well as ADT.
+
+ This contains project specific properties such as project target, and library
+ dependencies. Lower level build properties are stored in ant.properties
+ (or in .classpath for Eclipse projects).
+
+ This file is an integral part of the build system for your
+ application and should be checked into Version Control Systems. -->
+ <loadproperties srcFile="project.properties" />
+
+ <!-- quick check on sdk.dir -->
+ <fail
+ message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+ unless="sdk.dir"
+ />
+
+ <!--
+ Import per project custom build rules if present at the root of the project.
+ This is the place to put custom intermediary targets such as:
+ -pre-build
+ -pre-compile
+ -post-compile (This is typically used for code obfuscation.
+ Compiled code location: ${out.classes.absolute.dir}
+ If this is not done in place, override ${out.dex.input.absolute.dir})
+ -post-package
+ -post-build
+ -pre-clean
+ -->
+ <import file="custom_rules.xml" optional="true" />
+
+ <!-- Import the actual build file.
+
+ To customize existing targets, there are two options:
+ - Customize only one target:
+ - copy/paste the target into this file, *before* the
+ <import> task.
+ - customize it to your needs.
+ - Customize the whole content of build.xml
+ - copy/paste the content of the rules files (minus the top node)
+ into this file, replacing the <import> task.
+ - customize to your needs.
+
+ ***********************
+ ****** IMPORTANT ******
+ ***********************
+ In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+ in order to avoid having your file be overridden by tools such as "android update project"
+ -->
+ <!-- version-tag: 1 -->
+ <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc b/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc
new file mode 100644
index 0000000000..1a11075216
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/loggable_test.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "rtc_base/logging.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+JNI_FUNCTION_DECLARATION(void,
+ LoggableTest_nativeLogInfoTestMessage,
+ JNIEnv* jni,
+ jclass,
+ jstring j_message) {
+ std::string message =
+ JavaToNativeString(jni, JavaParamRef<jstring>(j_message));
+ RTC_LOG(LS_INFO) << message;
+}
+
+} // namespace jni
+} // namespace webrtc
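Note: JNI_FUNCTION_DECLARATION above expands to the JNI export Java_org_webrtc_LoggableTest_nativeLogInfoTestMessage, so the C++ function binds to a static native method on the Java side. A minimal sketch of the matching declaration, assuming LoggableTest.java (added by this commit, content not shown in this excerpt) follows the usual org.webrtc naming convention:

    package org.webrtc;

    public class LoggableTest {
      // Sketch only, not the file's actual contents: the JNI symbol
      // Java_org_webrtc_LoggableTest_nativeLogInfoTestMessage resolves against
      // a static native method of this name in this package and class.
      private static native void nativeLogInfoTestMessage(String message);
    }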
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties b/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties
new file mode 100644
index 0000000000..a6ca533fe3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked into Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java
new file mode 100644
index 0000000000..8166f5b544
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/** Unit tests for {@link AndroidVideoDecoder}. */
+@RunWith(Parameterized.class)
+public final class AndroidVideoDecoderInstrumentationTest {
+ @Parameters(name = "{0};useEglContext={1}")
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(new Object[] {/*codecName=*/"VP8", /*useEglContext=*/false},
+ new Object[] {/*codecName=*/"VP8", /*useEglContext=*/true},
+ new Object[] {/*codecName=*/"H264", /*useEglContext=*/false},
+ new Object[] {/*codecName=*/"H264", /*useEglContext=*/true});
+ }
+
+ private final VideoCodecInfo codecType;
+ private final boolean useEglContext;
+
+ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglContext) {
+ if (codecName.equals("H264")) {
+ this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC;
+ } else {
+ this.codecType = new VideoCodecInfo(codecName, new HashMap<>());
+ }
+ this.useEglContext = useEglContext;
+ }
+
+ private static final String TAG = "AndroidVideoDecoderInstrumentationTest";
+
+ private static final int TEST_FRAME_COUNT = 10;
+ private static final int TEST_FRAME_WIDTH = 640;
+ private static final int TEST_FRAME_HEIGHT = 360;
+ private VideoFrame.I420Buffer[] TEST_FRAMES;
+
+ private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
+ private static final boolean ENABLE_H264_HIGH_PROFILE = true;
+ private static final VideoEncoder.Settings ENCODER_SETTINGS = new VideoEncoder.Settings(
+ 1 /* core */,
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ 300 /* kbps */, 30 /* fps */, 1 /* numberOfSimulcastStreams */, true /* automaticResizeOn */,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
+
+ private static final int DECODE_TIMEOUT_MS = 1000;
+ private static final VideoDecoder.Settings SETTINGS = new VideoDecoder.Settings(1 /* core */,
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()));
+
+ private static class MockDecodeCallback implements VideoDecoder.Callback {
+ private BlockingQueue<VideoFrame> frameQueue = new LinkedBlockingQueue<>();
+
+ @Override
+ public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
+ assertNotNull(frame);
+ frameQueue.offer(frame);
+ }
+
+ public void assertFrameDecoded(EncodedImage testImage, VideoFrame.I420Buffer testBuffer) {
+ VideoFrame decodedFrame = poll();
+ VideoFrame.Buffer decodedBuffer = decodedFrame.getBuffer();
+ assertEquals(testImage.encodedWidth, decodedBuffer.getWidth());
+ assertEquals(testImage.encodedHeight, decodedBuffer.getHeight());
+      // TODO(sakal): Decoder loses the nanosecond precision. This is not a problem in practice
+ // because C++ EncodedImage stores the timestamp in milliseconds.
+ assertEquals(testImage.captureTimeNs / 1000, decodedFrame.getTimestampNs() / 1000);
+ assertEquals(testImage.rotation, decodedFrame.getRotation());
+ }
+
+ public VideoFrame poll() {
+ try {
+ VideoFrame frame = frameQueue.poll(DECODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ assertNotNull("Timed out waiting for the frame to be decoded.", frame);
+ return frame;
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ private static VideoFrame.I420Buffer[] generateTestFrames() {
+ VideoFrame.I420Buffer[] result = new VideoFrame.I420Buffer[TEST_FRAME_COUNT];
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ result[i] = JavaI420Buffer.allocate(
+ getAlignedNumber(TEST_FRAME_WIDTH, HardwareVideoEncoderTest.getPixelAlignmentRequired()),
+ getAlignedNumber(
+ TEST_FRAME_HEIGHT, HardwareVideoEncoderTest.getPixelAlignmentRequired()));
+ // TODO(sakal): Generate content for the test frames.
+ }
+ return result;
+ }
+
+ private final EncodedImage[] encodedTestFrames = new EncodedImage[TEST_FRAME_COUNT];
+ private EglBase14 eglBase;
+
+ private VideoDecoderFactory createDecoderFactory(EglBase.Context eglContext) {
+ return new HardwareVideoDecoderFactory(eglContext);
+ }
+
+ private @Nullable VideoDecoder createDecoder() {
+ VideoDecoderFactory factory =
+ createDecoderFactory(useEglContext ? eglBase.getEglBaseContext() : null);
+ return factory.createDecoder(codecType);
+ }
+
+ private void encodeTestFrames() {
+ VideoEncoderFactory encoderFactory = new HardwareVideoEncoderFactory(
+ eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+ VideoEncoder encoder = encoderFactory.createEncoder(codecType);
+ HardwareVideoEncoderTest.MockEncoderCallback encodeCallback =
+ new HardwareVideoEncoderTest.MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(ENCODER_SETTINGS, encodeCallback));
+
+ long lastTimestampNs = 0;
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / ENCODER_SETTINGS.maxFramerate;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+ HardwareVideoEncoderTest.testEncodeFrame(
+ encoder, new VideoFrame(TEST_FRAMES[i], 0 /* rotation */, lastTimestampNs), info);
+ encodedTestFrames[i] = encodeCallback.poll();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ private static int getAlignedNumber(int number, int alignment) {
+ return (number / alignment) * alignment;
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+
+ TEST_FRAMES = generateTestFrames();
+
+ eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ encodeTestFrames();
+ }
+
+ @After
+ public void tearDown() {
+ eglBase.release();
+ }
+
+ @Test
+ @SmallTest
+ public void testInitialize() {
+ VideoDecoder decoder = createDecoder();
+ assertEquals(VideoCodecStatus.OK, decoder.initDecode(SETTINGS, null /* decodeCallback */));
+ assertEquals(VideoCodecStatus.OK, decoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testDecode() {
+ VideoDecoder decoder = createDecoder();
+ MockDecodeCallback callback = new MockDecodeCallback();
+ assertEquals(VideoCodecStatus.OK, decoder.initDecode(SETTINGS, callback));
+
+ for (int i = 0; i < TEST_FRAME_COUNT; i++) {
+ assertEquals(VideoCodecStatus.OK,
+ decoder.decode(encodedTestFrames[i],
+ new VideoDecoder.DecodeInfo(false /* isMissingFrames */, 0 /* renderTimeMs */)));
+ callback.assertFrameDecoded(encodedTestFrames[i], TEST_FRAMES[i]);
+ }
+
+ assertEquals(VideoCodecStatus.OK, decoder.release());
+ }
+}
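A note on the private getAlignedNumber() helper above: it rounds a frame dimension down to the nearest multiple of the encoder's required pixel alignment, since the integer division truncates. A worked sketch, using an illustrative alignment of 16 (the real value comes from HardwareVideoEncoderTest.getPixelAlignmentRequired()):

    // Same logic as the test's helper: truncating integer division, then
    // multiplying back, drops any remainder below the alignment boundary.
    static int getAlignedNumber(int number, int alignment) {
      return (number / alignment) * alignment;
    }

    // With an assumed alignment of 16:
    //   getAlignedNumber(640, 16) == 640  (640 is already a multiple of 16)
    //   getAlignedNumber(360, 16) == 352  (360 = 22 * 16 + 8; the 8 is dropped)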
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java
new file mode 100644
index 0000000000..8c9119eb7b
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/BuiltinAudioCodecsFactoryFactoryTest.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+
+public final class BuiltinAudioCodecsFactoryFactoryTest {
+ @Before
+ public void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testAudioEncoderFactoryFactoryTest() throws Exception {
+ BuiltinAudioEncoderFactoryFactory factory = new BuiltinAudioEncoderFactoryFactory();
+ long aef = 0;
+ try {
+ aef = factory.createNativeAudioEncoderFactory();
+ assertThat(aef).isNotEqualTo(0);
+ } finally {
+ if (aef != 0) {
+ JniCommon.nativeReleaseRef(aef);
+ }
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testAudioDecoderFactoryFactoryTest() throws Exception {
+ BuiltinAudioDecoderFactoryFactory factory = new BuiltinAudioDecoderFactoryFactory();
+ long adf = 0;
+ try {
+ adf = factory.createNativeAudioDecoderFactory();
+ assertThat(adf).isNotEqualTo(0);
+ } finally {
+ if (adf != 0) {
+ JniCommon.nativeReleaseRef(adf);
+ }
+ }
+ }
+}
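Beyond these create-and-release-ref tests, the factory factories are normally handed to a PeerConnectionFactory builder rather than used directly. A minimal sketch, assuming the standard org.webrtc builder API; the builder then owns the native references, so no explicit JniCommon.nativeReleaseRef() call is expected:

    import org.webrtc.BuiltinAudioDecoderFactoryFactory;
    import org.webrtc.BuiltinAudioEncoderFactoryFactory;
    import org.webrtc.PeerConnectionFactory;

    // Wire the built-in audio codec factories into a PeerConnectionFactory.
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setAudioEncoderFactoryFactory(new BuiltinAudioEncoderFactoryFactory())
        .setAudioDecoderFactoryFactory(new BuiltinAudioDecoderFactoryFactory())
        .createPeerConnectionFactory();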
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
new file mode 100644
index 0000000000..37d03d99d6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera1CapturerUsingByteBufferTest {
+ static final String TAG = "Camera1CapturerUsingByteBufferTest";
+
+ private static class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public boolean isCapturingToTexture() {
+ return false;
+ }
+
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera1Enumerator(false);
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((android.hardware.Camera) camera).release();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+  // Test what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already open
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
new file mode 100644
index 0000000000..e0419178c6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera1CapturerUsingTextureTest {
+ static final String TAG = "Camera1CapturerUsingTextureTest";
+
+ private static class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera1Enumerator();
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((android.hardware.Camera) camera).release();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+  // Test what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that CameraEventsHandler.onError is triggered if video buffers are not returned to
+ // the capturer.
+ @Test
+ @LargeTest
+ public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ fixtures.cameraFreezedEventOnBufferStarvation();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already open
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
new file mode 100644
index 0000000000..b01737197a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
@@ -0,0 +1,334 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.fail;
+
+import android.content.Context;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.util.concurrent.CountDownLatch;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class Camera2CapturerTest {
+ static final String TAG = "Camera2CapturerTest";
+
+ /**
+ * Simple camera2 implementation that only knows how to open the camera and close it.
+ */
+ private class SimpleCamera2 {
+ final CameraManager cameraManager;
+ final LooperThread looperThread;
+ final CountDownLatch openDoneSignal;
+ final Object cameraDeviceLock;
+ @Nullable CameraDevice cameraDevice; // Guarded by cameraDeviceLock
+ boolean openSucceeded; // Guarded by cameraDeviceLock
+
+ private class LooperThread extends Thread {
+ final CountDownLatch startedSignal = new CountDownLatch(1);
+ private Handler handler;
+
+ @Override
+ public void run() {
+ Looper.prepare();
+ handler = new Handler();
+ startedSignal.countDown();
+ Looper.loop();
+ }
+
+ public void waitToStart() {
+ ThreadUtils.awaitUninterruptibly(startedSignal);
+ }
+
+ public void requestStop() {
+ handler.getLooper().quit();
+ }
+
+ public Handler getHandler() {
+ return handler;
+ }
+ }
+
+ private class CameraStateCallback extends CameraDevice.StateCallback {
+ @Override
+ public void onClosed(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 closed.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = null;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 disconnected.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = null;
+ }
+ }
+
+ @Override
+ public void onError(CameraDevice cameraDevice, int errorCode) {
+ Logging.w(TAG, "Simple camera2 error: " + errorCode);
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = cameraDevice;
+ openSucceeded = false;
+ }
+
+ openDoneSignal.countDown();
+ }
+
+ @Override
+ public void onOpened(CameraDevice cameraDevice) {
+ Logging.d(TAG, "Simple camera2 opened.");
+
+ synchronized (cameraDeviceLock) {
+ SimpleCamera2.this.cameraDevice = cameraDevice;
+ openSucceeded = true;
+ }
+
+ openDoneSignal.countDown();
+ }
+ }
+
+ SimpleCamera2(Context context, String deviceName) {
+ cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ looperThread = new LooperThread();
+ looperThread.start();
+ looperThread.waitToStart();
+ cameraDeviceLock = new Object();
+ openDoneSignal = new CountDownLatch(1);
+ cameraDevice = null;
+ Logging.d(TAG, "Opening simple camera2.");
+ try {
+ cameraManager.openCamera(deviceName, new CameraStateCallback(), looperThread.getHandler());
+ } catch (CameraAccessException e) {
+ fail("Simple camera2 CameraAccessException: " + e.getMessage());
+ }
+
+ Logging.d(TAG, "Waiting for simple camera2 to open.");
+ ThreadUtils.awaitUninterruptibly(openDoneSignal);
+ synchronized (cameraDeviceLock) {
+ if (!openSucceeded) {
+ fail("Opening simple camera2 failed.");
+ }
+ }
+ }
+
+ public void close() {
+ Logging.d(TAG, "Closing simple camera2.");
+ synchronized (cameraDeviceLock) {
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ }
+ }
+
+ looperThread.requestStop();
+ ThreadUtils.joinUninterruptibly(looperThread);
+ }
+ }
+
+ private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
+ @Override
+ public CameraEnumerator getCameraEnumerator() {
+ return new Camera2Enumerator(getAppContext());
+ }
+
+ @Override
+ public Context getAppContext() {
+ return InstrumentationRegistry.getTargetContext();
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public Object rawOpenCamera(String cameraName) {
+ return new SimpleCamera2(getAppContext(), cameraName);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void rawCloseCamera(Object camera) {
+ ((SimpleCamera2) camera).close();
+ }
+ }
+
+ private CameraVideoCapturerTestFixtures fixtures;
+
+ @Before
+ public void setUp() {
+ fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
+ }
+
+ @After
+ public void tearDown() {
+ fixtures.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateAndDispose() throws InterruptedException {
+ fixtures.createCapturerAndDispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testCreateNonExistingCamera() throws InterruptedException {
+ fixtures.createNonExistingCamera();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using a "default" capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testCreateCapturerAndRender() throws InterruptedException {
+ fixtures.createCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the front facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+ fixtures.createFrontFacingCapturerAndRender();
+ }
+
+  // This tests that the camera can be started and that the frames are forwarded
+ // to a Java video renderer using the back facing video capturer.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testStartBackFacingVideoCapturer() throws InterruptedException {
+ fixtures.createBackFacingCapturerAndRender();
+ }
+
+  // This tests that the default camera can be started and that the camera can
+ // later be switched to another camera.
+ // It tests both the Java and the C++ layer.
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturer() throws InterruptedException {
+ fixtures.switchCamera();
+ }
+
+ @Test
+ @MediumTest
+ public void testSwitchVideoCapturerToSpecificCameraName() throws InterruptedException {
+ fixtures.switchCamera(true /* specifyCameraName */);
+ }
+
+ @Test
+ @MediumTest
+ public void testCameraEvents() throws InterruptedException {
+ fixtures.cameraEventsInvoked();
+ }
+
+  // Test what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
+ @Test
+ @MediumTest
+ public void testCameraCallsAfterStop() throws InterruptedException {
+ fixtures.cameraCallsAfterStop();
+ }
+
+  // This tests that the VideoSource that the CameraVideoCapturer is connected to can
+ // be stopped and restarted. It tests both the Java and the C++ layer.
+ @Test
+ @LargeTest
+ public void testStopRestartVideoSource() throws InterruptedException {
+ fixtures.stopRestartVideoSource();
+ }
+
+  // This tests that the camera can be started at different resolutions.
+ // It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testStartStopWithDifferentResolutions() throws InterruptedException {
+ fixtures.startStopWithDifferentResolutions();
+ }
+
+  // This tests what happens if buffers are returned after the capturer has
+ // been stopped and restarted. It does not test or use the C++ layer.
+ @Test
+ @LargeTest
+ public void testReturnBufferLate() throws InterruptedException {
+ fixtures.returnBufferLate();
+ }
+
+  // This tests that we can capture frames, keep the frames in a local renderer, stop capturing,
+  // and then return the frames. The difference from testReturnBufferLate() is that we
+ // also test the JNI and C++ AndroidVideoCapturer parts.
+ @Test
+ @MediumTest
+ public void testReturnBufferLateEndToEnd() throws InterruptedException {
+ fixtures.returnBufferLateEndToEnd();
+ }
+
+  // This tests that CameraEventsHandler.onError is triggered if video buffers are not returned to
+ // the capturer.
+ @Test
+ @LargeTest
+ public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ fixtures.cameraFreezedEventOnBufferStarvation();
+ }
+
+  // This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
+  // called. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testScaleCameraOutput() throws InterruptedException {
+ fixtures.scaleCameraOutput();
+ }
+
+  // This tests that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+ @Test
+ @MediumTest
+ public void testCropCameraOutput() throws InterruptedException {
+ fixtures.cropCameraOutput();
+ }
+
+  // This tests that an error is reported if the camera is already open
+ // when CameraVideoCapturer is started.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpen();
+ }
+
+  // This tests that CameraVideoCapturer can be started even if the camera is already open,
+  // provided the camera is closed while CameraVideoCapturer is re-trying to start.
+ @Test
+ @LargeTest
+ public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
+ }
+
+  // This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
+ // re-trying to start.
+ @Test
+ @MediumTest
+ public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ fixtures.startWhileCameraIsAlreadyOpenAndStop();
+ }
+}
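SimpleCamera2 exists so the startWhileCameraIsAlreadyOpen* tests can hold the camera open from a competing client while CameraVideoCapturer tries to start. A hypothetical sketch of that pattern (the fixture methods' bodies are outside this excerpt, so the exact sequencing is an assumption):

    // Hypothetical helper, not part of this diff: hold a competing camera open
    // (a SimpleCamera2 instance, via rawOpenCamera) around a capturer start
    // attempt, then release it so a retrying capturer can succeed.
    static void withCompetingCamera(CameraVideoCapturerTestFixtures.TestObjectFactory factory,
        String cameraName, Runnable startAttempt) {
      Object competingCamera = factory.rawOpenCamera(cameraName);
      try {
        // A startCapture() issued in here is expected to fail and retry, or to
        // report an error through CameraEventsHandler, while the camera is held.
        startAttempt.run();
      } finally {
        factory.rawCloseCamera(competingCamera);
      }
    }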
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
new file mode 100644
index 0000000000..aa5fb0c1c9
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -0,0 +1,793 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.VideoFrame;
+
+class CameraVideoCapturerTestFixtures {
+ static final String TAG = "CameraVideoCapturerTestFixtures";
+  // Default values used when starting capture
+ static final int DEFAULT_WIDTH = 640;
+ static final int DEFAULT_HEIGHT = 480;
+ static final int DEFAULT_FPS = 15;
+
+ static private class RendererCallbacks implements VideoSink {
+ private final Object frameLock = new Object();
+ private int framesRendered;
+ private int width;
+ private int height;
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (frameLock) {
+ ++framesRendered;
+ width = frame.getRotatedWidth();
+ height = frame.getRotatedHeight();
+ frameLock.notify();
+ }
+ }
+
+ public int frameWidth() {
+ synchronized (frameLock) {
+ return width;
+ }
+ }
+
+ public int frameHeight() {
+ synchronized (frameLock) {
+ return height;
+ }
+ }
+
+ public int waitForNextFrameToRender() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next frame to render");
+ synchronized (frameLock) {
+ final int framesRenderedStart = framesRendered;
+ while (framesRendered == framesRenderedStart) {
+ frameLock.wait();
+ }
+ return framesRendered;
+ }
+ }
+ }
+
+ static private class FakeAsyncRenderer implements VideoSink {
+ private final List<VideoFrame> pendingFrames = new ArrayList<VideoFrame>();
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ synchronized (pendingFrames) {
+ frame.retain();
+ pendingFrames.add(frame);
+ pendingFrames.notifyAll();
+ }
+ }
+
+  // Wait until at least one frame has been received before returning the pending frames.
+ public List<VideoFrame> waitForPendingFrames() throws InterruptedException {
+ Logging.d(TAG, "Waiting for pending frames");
+ synchronized (pendingFrames) {
+ while (pendingFrames.isEmpty()) {
+ pendingFrames.wait();
+ }
+ return new ArrayList<VideoFrame>(pendingFrames);
+ }
+ }
+ }
+
+ static private class FakeCapturerObserver implements CapturerObserver {
+ private int framesCaptured;
+ private @Nullable VideoFrame videoFrame;
+ final private Object frameLock = new Object();
+ final private Object capturerStartLock = new Object();
+ private Boolean capturerStartResult;
+ final private List<Long> timestamps = new ArrayList<Long>();
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ Logging.d(TAG, "onCapturerStarted: " + success);
+
+ synchronized (capturerStartLock) {
+ capturerStartResult = success;
+ capturerStartLock.notifyAll();
+ }
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ Logging.d(TAG, "onCapturerStopped");
+ }
+
+ @Override
+ public void onFrameCaptured(VideoFrame frame) {
+ synchronized (frameLock) {
+ ++framesCaptured;
+ if (videoFrame != null) {
+ videoFrame.release();
+ }
+ videoFrame = frame;
+ videoFrame.retain();
+ timestamps.add(videoFrame.getTimestampNs());
+ frameLock.notify();
+ }
+ }
+
+ public boolean waitForCapturerToStart() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the capturer to start");
+ synchronized (capturerStartLock) {
+ while (capturerStartResult == null) {
+ capturerStartLock.wait();
+ }
+ return capturerStartResult;
+ }
+ }
+
+ public int waitForNextCapturedFrame() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the next captured frame");
+ synchronized (frameLock) {
+ final int framesCapturedStart = framesCaptured;
+ while (framesCaptured == framesCapturedStart) {
+ frameLock.wait();
+ }
+ return framesCaptured;
+ }
+ }
+
+ int frameWidth() {
+ synchronized (frameLock) {
+ return videoFrame.getBuffer().getWidth();
+ }
+ }
+
+ int frameHeight() {
+ synchronized (frameLock) {
+ return videoFrame.getBuffer().getHeight();
+ }
+ }
+
+ void releaseFrame() {
+ synchronized (frameLock) {
+ if (videoFrame != null) {
+ videoFrame.release();
+ videoFrame = null;
+ }
+ }
+ }
+
+ List<Long> getCopyAndResetListOftimeStamps() {
+ synchronized (frameLock) {
+ ArrayList<Long> list = new ArrayList<Long>(timestamps);
+ timestamps.clear();
+ return list;
+ }
+ }
+ }
+
+ static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
+ public boolean onCameraOpeningCalled;
+ public boolean onFirstFrameAvailableCalled;
+ private final Object onCameraFreezedLock = new Object();
+ private String onCameraFreezedDescription;
+ private final Object cameraClosedLock = new Object();
+ private boolean cameraClosed = true;
+
+ @Override
+ public void onCameraError(String errorDescription) {
+ Logging.w(TAG, "Camera error: " + errorDescription);
+ cameraClosed = true;
+ }
+
+ @Override
+ public void onCameraDisconnected() {}
+
+ @Override
+ public void onCameraFreezed(String errorDescription) {
+ synchronized (onCameraFreezedLock) {
+ onCameraFreezedDescription = errorDescription;
+ onCameraFreezedLock.notifyAll();
+ }
+ }
+
+ @Override
+ public void onCameraOpening(String cameraName) {
+ onCameraOpeningCalled = true;
+ synchronized (cameraClosedLock) {
+ cameraClosed = false;
+ }
+ }
+
+ @Override
+ public void onFirstFrameAvailable() {
+ onFirstFrameAvailableCalled = true;
+ }
+
+ @Override
+ public void onCameraClosed() {
+ synchronized (cameraClosedLock) {
+ cameraClosed = true;
+ cameraClosedLock.notifyAll();
+ }
+ }
+
+ public String waitForCameraFreezed() throws InterruptedException {
+ Logging.d(TAG, "Waiting for the camera to freeze");
+ synchronized (onCameraFreezedLock) {
+ while (onCameraFreezedDescription == null) {
+ onCameraFreezedLock.wait();
+ }
+ return onCameraFreezedDescription;
+ }
+ }
+
+ public void waitForCameraClosed() throws InterruptedException {
+ synchronized (cameraClosedLock) {
+ while (!cameraClosed) {
+ Logging.d(TAG, "Waiting for the camera to close.");
+ cameraClosedLock.wait();
+ }
+ }
+ }
+ }
+
+ /**
+   * Class collecting all objects related to a single capturer instance.
+ */
+ static private class CapturerInstance {
+ public CameraVideoCapturer capturer;
+ public CameraEvents cameraEvents;
+ public SurfaceTextureHelper surfaceTextureHelper;
+ public FakeCapturerObserver observer;
+ public List<CaptureFormat> supportedFormats;
+ public CaptureFormat format;
+ }
+
+ /**
+ * Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
+ * is used for testing local rendering from a capturer.
+ */
+ static private class VideoTrackWithRenderer {
+ public SurfaceTextureHelper surfaceTextureHelper;
+ public VideoSource source;
+ public VideoTrack track;
+ public RendererCallbacks rendererCallbacks;
+ public FakeAsyncRenderer fakeAsyncRenderer;
+ }
+
+ public abstract static class TestObjectFactory {
+ final CameraEnumerator cameraEnumerator;
+
+ TestObjectFactory() {
+ cameraEnumerator = getCameraEnumerator();
+ }
+
+ public CameraVideoCapturer createCapturer(
+ String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return cameraEnumerator.createCapturer(name, eventsHandler);
+ }
+
+ public @Nullable String getNameOfFrontFacingDevice() {
+ for (String deviceName : cameraEnumerator.getDeviceNames()) {
+ if (cameraEnumerator.isFrontFacing(deviceName)) {
+ return deviceName;
+ }
+ }
+
+ return null;
+ }
+
+ public @Nullable String getNameOfBackFacingDevice() {
+ for (String deviceName : cameraEnumerator.getDeviceNames()) {
+ if (cameraEnumerator.isBackFacing(deviceName)) {
+ return deviceName;
+ }
+ }
+
+ return null;
+ }
+
+ public boolean haveTwoCameras() {
+ return cameraEnumerator.getDeviceNames().length >= 2;
+ }
+
+ public boolean isCapturingToTexture() {
+ // In the future, we plan to only support capturing to texture, so default to true
+ return true;
+ }
+
+    public abstract CameraEnumerator getCameraEnumerator();
+    public abstract Context getAppContext();
+
+ // CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
+ // camera. These methods are used instead.
+    public abstract Object rawOpenCamera(String cameraName);
+    public abstract void rawCloseCamera(Object camera);
+ }
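+  // A minimal concrete-factory sketch (hypothetical; the real subclasses live
+  // in the Camera1/Camera2 capturer test files), assuming a Camera2 device:
+  //
+  //   class Camera2TestObjectFactory extends TestObjectFactory {
+  //     @Override
+  //     public CameraEnumerator getCameraEnumerator() {
+  //       return new Camera2Enumerator(getAppContext());
+  //     }
+  //     @Override
+  //     public Context getAppContext() {
+  //       return InstrumentationRegistry.getTargetContext();
+  //     }
+  //     // rawOpenCamera()/rawCloseCamera() would call the android.hardware
+  //     // camera APIs directly, bypassing CameraVideoCapturer.
+  //   }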
+
+ private PeerConnectionFactory peerConnectionFactory;
+ private TestObjectFactory testObjectFactory;
+
+ CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
+ PeerConnectionFactory.initialize(
+ PeerConnectionFactory.InitializationOptions.builder(testObjectFactory.getAppContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ this.peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ this.testObjectFactory = testObjectFactory;
+ }
+
+ public void dispose() {
+ this.peerConnectionFactory.dispose();
+ }
+
+ // Internal helper methods
+ private CapturerInstance createCapturer(String name, boolean initialize) {
+ CapturerInstance instance = new CapturerInstance();
+ instance.cameraEvents = new CameraEvents();
+ instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
+ instance.surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
+ instance.observer = new FakeCapturerObserver();
+ if (initialize) {
+ instance.capturer.initialize(
+ instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
+ }
+ instance.supportedFormats = testObjectFactory.cameraEnumerator.getSupportedFormats(name);
+ return instance;
+ }
+
+ private CapturerInstance createCapturer(boolean initialize) {
+ String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
+ return createCapturer(name, initialize);
+ }
+
+ private void startCapture(CapturerInstance instance) {
+ startCapture(instance, 0);
+ }
+
+ private void startCapture(CapturerInstance instance, int formatIndex) {
+ final CameraEnumerationAndroid.CaptureFormat format =
+ instance.supportedFormats.get(formatIndex);
+
+ instance.capturer.startCapture(format.width, format.height, format.framerate.max);
+ instance.format = format;
+ }
+
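+  // Disposal order matters: stop capture, wait for the camera to close, dispose
+  // the capturer, release any held frame, then dispose the SurfaceTextureHelper.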
+ private void disposeCapturer(CapturerInstance instance) throws InterruptedException {
+ instance.capturer.stopCapture();
+ instance.cameraEvents.waitForCameraClosed();
+ instance.capturer.dispose();
+ instance.observer.releaseFrame();
+ instance.surfaceTextureHelper.dispose();
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(
+ CameraVideoCapturer capturer, VideoSink rendererCallbacks) {
+ VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
+ videoTrackWithRenderer.surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
+ videoTrackWithRenderer.source =
+ peerConnectionFactory.createVideoSource(/* isScreencast= */ false);
+ capturer.initialize(videoTrackWithRenderer.surfaceTextureHelper,
+ testObjectFactory.getAppContext(), videoTrackWithRenderer.source.getCapturerObserver());
+ capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
+ videoTrackWithRenderer.track =
+ peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
+ videoTrackWithRenderer.track.addSink(rendererCallbacks);
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
+ RendererCallbacks rendererCallbacks = new RendererCallbacks();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, rendererCallbacks);
+ videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
+ return videoTrackWithRenderer;
+ }
+
+ private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
+ CameraVideoCapturer capturer) {
+ FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
+ VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
+ videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
+ return videoTrackWithRenderer;
+ }
+
+ private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
+ videoTrackWithRenderer.track.dispose();
+ videoTrackWithRenderer.source.dispose();
+ }
+
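+  // Drains the capturer's camera thread: posting a marker Runnable and awaiting
+  // it guarantees that all previously posted tasks have finished running.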
+ private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
+ final CountDownLatch barrier = new CountDownLatch(1);
+ capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ barrier.countDown();
+ }
+ });
+ barrier.await();
+ }
+
+ private void createCapturerAndRender(String name) throws InterruptedException {
+ if (name == null) {
+ Logging.w(TAG, "Skipping video capturer test because device name is null.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ // Test methods
+ public void createCapturerAndDispose() throws InterruptedException {
+ disposeCapturer(createCapturer(true /* initialize */));
+ }
+
+ public void createNonExistingCamera() throws InterruptedException {
+ try {
+ disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
+ } catch (IllegalArgumentException e) {
+ return;
+ }
+
+ fail("Expected illegal argument exception when creating non-existing camera.");
+ }
+
+ public void createCapturerAndRender() throws InterruptedException {
+ String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
+ createCapturerAndRender(name);
+ }
+
+ public void createFrontFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
+ }
+
+ public void createBackFacingCapturerAndRender() throws InterruptedException {
+ createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
+ }
+
+ public void switchCamera() throws InterruptedException {
+ switchCamera(false /* specifyCameraName */);
+ }
+
+ public void switchCamera(boolean specifyCameraName) throws InterruptedException {
+ if (!testObjectFactory.haveTwoCameras()) {
+      Logging.w(
+          TAG, "Skipping camera switch test because the device doesn't have two cameras.");
+ return;
+ }
+
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+    // Wait for the camera to start so we can switch it.
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+    // One-element array so the anonymous handler below can write its result
+    // despite local variables needing to be effectively final.
+ final boolean[] cameraSwitchSuccessful = new boolean[1];
+ final CountDownLatch barrier = new CountDownLatch(1);
+ final CameraVideoCapturer.CameraSwitchHandler cameraSwitchHandler =
+ new CameraVideoCapturer.CameraSwitchHandler() {
+ @Override
+ public void onCameraSwitchDone(boolean isFrontCamera) {
+ cameraSwitchSuccessful[0] = true;
+ barrier.countDown();
+ }
+ @Override
+ public void onCameraSwitchError(String errorDescription) {
+ cameraSwitchSuccessful[0] = false;
+ barrier.countDown();
+ }
+ };
+ if (specifyCameraName) {
+ String expectedCameraName = testObjectFactory.cameraEnumerator.getDeviceNames()[1];
+ capturerInstance.capturer.switchCamera(cameraSwitchHandler, expectedCameraName);
+ } else {
+ capturerInstance.capturer.switchCamera(cameraSwitchHandler);
+ }
+ // Wait until the camera has been switched.
+ barrier.await();
+
+ // Check result.
+ assertTrue(cameraSwitchSuccessful[0]);
+ // Ensure that frames are received.
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ public void cameraEventsInvoked() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+    // Make sure the camera is started and the first frame is received, then stop it.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
+ disposeCapturer(capturerInstance);
+
+ assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
+ assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
+ }
+
+ public void cameraCallsAfterStop() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+    // Make sure the camera is started, then stop it.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.observer.releaseFrame();
+
+ // We can't change `capturer` at this point, but we should not crash.
+ capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
+ capturerInstance.capturer.changeCaptureFormat(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
+
+ disposeCapturer(capturerInstance);
+ }
+
+ public void stopRestartVideoSource() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ capturerInstance.capturer.stopCapture();
+ assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());
+
+ startCapture(capturerInstance);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+ assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+ }
+
+ public void startStopWithDifferentResolutions() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+
+ for (int i = 0; i < 3; ++i) {
+ startCapture(capturerInstance, i);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ capturerInstance.observer.waitForNextCapturedFrame();
+
+      // Check the frame size. The actual width and height depend on how the
+      // camera sensor is mounted.
+ final boolean identicalResolution =
+ (capturerInstance.observer.frameWidth() == capturerInstance.format.width
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.height);
+ final boolean flippedResolution =
+ (capturerInstance.observer.frameWidth() == capturerInstance.format.height
+ && capturerInstance.observer.frameHeight() == capturerInstance.format.width);
+ if (!identicalResolution && !flippedResolution) {
+ fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+ + capturerInstance.observer.frameHeight() + " expected: "
+ + capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+ + capturerInstance.format.height + "x" + capturerInstance.format.width);
+ }
+
+ capturerInstance.capturer.stopCapture();
+ capturerInstance.observer.releaseFrame();
+ }
+ disposeCapturer(capturerInstance);
+ }
+
+ public void returnBufferLate() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+    List<Long> listOfTimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
+    assertTrue(listOfTimestamps.size() >= 1);
+
+ startCapture(capturerInstance, 1);
+ capturerInstance.observer.waitForCapturerToStart();
+ capturerInstance.observer.releaseFrame();
+
+ capturerInstance.observer.waitForNextCapturedFrame();
+ capturerInstance.capturer.stopCapture();
+
+    listOfTimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
+    assertTrue(listOfTimestamps.size() >= 1);
+
+ disposeCapturer(capturerInstance);
+ }
+
+ public void returnBufferLateEndToEnd() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
+ // Wait for at least one frame that has not been returned.
+ assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());
+
+ capturerInstance.capturer.stopCapture();
+
+ // Dispose everything.
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ // Return the frame(s), on a different thread out of spite.
+ final List<VideoFrame> pendingFrames =
+ videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
+ final Thread returnThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ for (VideoFrame frame : pendingFrames) {
+ frame.release();
+ }
+ }
+ });
+ returnThread.start();
+ returnThread.join();
+ }
+
+ public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
+ startCapture(capturerInstance);
+ // Make sure camera is started.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ // Since we don't return the buffer, we should get a starvation message if we are
+ // capturing to a texture.
+ assertEquals("Camera failure. Client must return video buffers.",
+ capturerInstance.cameraEvents.waitForCameraFreezed());
+
+ capturerInstance.capturer.stopCapture();
+ disposeCapturer(capturerInstance);
+ }
+
+ public void scaleCameraOutput() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+ final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+ final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
+ final int frameRate = 30;
+ final int scaledWidth = startWidth / 2;
+ final int scaledHeight = startHeight / 2;
+
+ // Request the captured frames to be scaled.
+ videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);
+
+ boolean gotExpectedResolution = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
+ && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
+ } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ assertTrue(gotExpectedResolution);
+ }
+
+ public void cropCameraOutput() throws InterruptedException {
+ final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+ final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+ final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
+ final int frameRate = 30;
+ final int cropWidth;
+ final int cropHeight;
+ if (startWidth > startHeight) {
+ // Landscape input, request portrait output.
+ cropWidth = 360;
+ cropHeight = 640;
+ } else {
+ // Portrait input, request landscape output.
+ cropWidth = 640;
+      cropHeight = 360;
+ }
+
+ // Request different output orientation than input.
+ videoTrackWithRenderer.source.adaptOutputFormat(
+ cropWidth, cropHeight, cropWidth, cropHeight, frameRate);
+
+ boolean gotExpectedOrientation = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedOrientation = (cropWidth > cropHeight)
+ == (videoTrackWithRenderer.rendererCallbacks.frameWidth()
+ > videoTrackWithRenderer.rendererCallbacks.frameHeight());
+ } while (!gotExpectedOrientation && numberOfInspectedFrames < 30);
+
+ disposeCapturer(capturerInstance);
+ disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+ assertTrue(gotExpectedOrientation);
+ }
+
+ public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+    // At this point the camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
+
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+
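+    // On API levels above LOLLIPOP_MR1 (22) the camera service evicts the
+    // existing client in favor of the new one; on older releases the new
+    // client fails to open instead.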
+ if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+ // The first opened camera client will be evicted.
+ assertTrue(capturerInstance.observer.waitForCapturerToStart());
+ } else {
+ assertFalse(capturerInstance.observer.waitForCapturerToStart());
+ }
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ disposeCapturer(capturerInstance);
+ }
+
+ public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+    // At this point the camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
+ final VideoTrackWithRenderer videoTrackWithRenderer =
+ createVideoTrackWithRenderer(capturerInstance.capturer);
+ waitUntilIdle(capturerInstance);
+
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
+ testObjectFactory.rawCloseCamera(competingCamera);
+
+ // Make sure camera is started and first frame is received and then stop it.
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
+ videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+ Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
+ disposeCapturer(capturerInstance);
+ }
+
+ public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+ final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
+    // At this point the camera is not actually opened.
+ final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
+
+ final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
+
+ startCapture(capturerInstance);
+ disposeCapturer(capturerInstance);
+
+ testObjectFactory.rawCloseCamera(competingCamera);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
new file mode 100644
index 0000000000..9721cbd818
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2017 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.HashMap;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit tests for {@link DefaultVideoEncoderFactory}. */
+public class DefaultVideoEncoderFactoryTest {
+ static class CustomHardwareVideoEncoderFactory implements VideoEncoderFactory {
+ private ArrayList<VideoCodecInfo> codecs = new ArrayList<>();
+
+ public CustomHardwareVideoEncoderFactory(boolean includeVP8, boolean includeH264High) {
+ if (includeVP8) {
+ codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+ }
+ codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+
+ HashMap<String, String> baselineParams = new HashMap<String, String>();
+ baselineParams.put("profile-level-id", "42e01f");
+ baselineParams.put("level-asymmetry-allowed", "1");
+ baselineParams.put("packetization-mode", "1");
+ codecs.add(new VideoCodecInfo("H264", baselineParams));
+
+ if (includeH264High) {
+ HashMap<String, String> highParams = new HashMap<String, String>();
+ highParams.put("profile-level-id", "640c1f");
+ highParams.put("level-asymmetry-allowed", "1");
+ highParams.put("packetization-mode", "1");
+ codecs.add(new VideoCodecInfo("H264", highParams));
+ }
+ }
+
+ @Override
+ public @Nullable VideoEncoder createEncoder(VideoCodecInfo info) {
+ return null;
+ }
+
+ @Override
+ public VideoCodecInfo[] getSupportedCodecs() {
+ return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+ }
+ }
+
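+  // DefaultVideoEncoderFactory combines the supplied hardware factory with the
+  // built-in software factory, so the software VP8/VP9/AV1 codecs are always
+  // listed, while H264 high profile appears only when the hardware supports it.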
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithHardwareH264HighProfile() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, true);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(5, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithoutHardwareH264HighProfile() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(true, false);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(4, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ }
+
+ @SmallTest
+ @Test
+ public void testGetSupportedCodecsWithoutHardwareVP8() {
+ VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(false, true);
+ DefaultVideoEncoderFactory dvef = new DefaultVideoEncoderFactory(hwFactory);
+ VideoCodecInfo[] videoCodecs = dvef.getSupportedCodecs();
+ assertEquals(5, videoCodecs.length);
+ assertEquals("VP8", videoCodecs[0].name);
+ assertEquals("VP9", videoCodecs[1].name);
+ assertEquals("AV1", videoCodecs[2].name);
+ assertEquals("H264", videoCodecs[3].name);
+ assertEquals("42e01f", videoCodecs[3].params.get("profile-level-id"));
+ assertEquals("H264", videoCodecs[4].name);
+ assertEquals("640c1f", videoCodecs[4].params.get("profile-level-id"));
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java
new file mode 100644
index 0000000000..8b5e95b855
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/EglRendererTest.java
@@ -0,0 +1,366 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.concurrent.CountDownLatch;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+// EmptyActivity is needed for the surface.
+public class EglRendererTest {
+  private static final String TAG = "EglRendererTest";
+  private static final int RENDER_WAIT_MS = 1000;
+  private static final int SURFACE_WAIT_MS = 1000;
+  private static final int TEST_FRAME_WIDTH = 4;
+  private static final int TEST_FRAME_HEIGHT = 4;
+  private static final int REMOVE_FRAME_LISTENER_RACY_NUM_TESTS = 10;
+ // Some arbitrary frames.
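+  // Each frame is a 4x4 I420 image: a 16-byte Y plane plus 2x2 (4-byte) U and V
+  // planes.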
+  private static final byte[][][] TEST_FRAMES_DATA = {
+ {
+ new byte[] {
+ -99, -93, -88, -83, -78, -73, -68, -62, -56, -52, -46, -41, -36, -31, -26, -20},
+ new byte[] {110, 113, 116, 118}, new byte[] {31, 45, 59, 73},
+ },
+ {
+ new byte[] {
+ -108, -103, -98, -93, -87, -82, -77, -72, -67, -62, -56, -50, -45, -40, -35, -30},
+ new byte[] {120, 123, 125, -127}, new byte[] {87, 100, 114, 127},
+ },
+ {
+ new byte[] {
+ -117, -112, -107, -102, -97, -92, -87, -81, -75, -71, -65, -60, -55, -50, -44, -39},
+ new byte[] {113, 116, 118, 120}, new byte[] {45, 59, 73, 87},
+ },
+ };
+  private static final ByteBuffer[][] TEST_FRAMES =
+ copyTestDataToDirectByteBuffers(TEST_FRAMES_DATA);
+
+ private static class TestFrameListener implements EglRenderer.FrameListener {
+    private final ArrayList<Bitmap> bitmaps = new ArrayList<Bitmap>();
+ boolean bitmapReceived;
+ Bitmap storedBitmap;
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(Bitmap bitmap) {
+ if (bitmapReceived) {
+ fail("Unexpected bitmap was received.");
+ }
+
+ bitmapReceived = true;
+ storedBitmap = bitmap;
+ notify();
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized boolean waitForBitmap(int timeoutMs) throws InterruptedException {
+ final long endTimeMs = System.currentTimeMillis() + timeoutMs;
+ while (!bitmapReceived) {
+ final long waitTimeMs = endTimeMs - System.currentTimeMillis();
+ if (waitTimeMs < 0) {
+ return false;
+ }
+        // Wait only for the remaining time so the total wait honors timeoutMs.
+        wait(waitTimeMs);
+ }
+ return true;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized Bitmap resetAndGetBitmap() {
+ bitmapReceived = false;
+ return storedBitmap;
+ }
+ }
+
+ final TestFrameListener testFrameListener = new TestFrameListener();
+
+ EglRenderer eglRenderer;
+ CountDownLatch surfaceReadyLatch = new CountDownLatch(1);
+ int oesTextureId;
+ SurfaceTexture surfaceTexture;
+
+ @Before
+ public void setUp() throws Exception {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ eglRenderer = new EglRenderer("TestRenderer: ");
+ eglRenderer.init(null /* sharedContext */, EglBase.CONFIG_RGBA, new GlRectDrawer());
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ surfaceTexture.setDefaultBufferSize(1 /* width */, 1 /* height */);
+ eglRenderer.createEglSurface(surfaceTexture);
+ }
+
+ @After
+ public void tearDown() {
+ surfaceTexture.release();
+ GLES20.glDeleteTextures(1 /* n */, new int[] {oesTextureId}, 0 /* offset */);
+ eglRenderer.release();
+ }
+
+  /** Checks that the bitmap is not null and has the correct size. */
+ private static void checkBitmap(Bitmap bitmap, float scale) {
+ assertNotNull(bitmap);
+ assertEquals((int) (TEST_FRAME_WIDTH * scale), bitmap.getWidth());
+ assertEquals((int) (TEST_FRAME_HEIGHT * scale), bitmap.getHeight());
+ }
+
+ /**
+ * Does linear sampling on U/V plane of test data.
+ *
+ * @param data Plane data to be sampled from.
+ * @param planeWidth Width of the plane data. This is also assumed to be the stride.
+ * @param planeHeight Height of the plane data.
+ * @param x X-coordinate in range [0, 1].
+ * @param y Y-coordinate in range [0, 1].
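+   * @return Bilinearly sampled value of the plane at (x, y), in the range [0, 255].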
+ */
+ private static float linearSample(
+ ByteBuffer plane, int planeWidth, int planeHeight, float x, float y) {
+ final int stride = planeWidth;
+
+ final float coordX = x * planeWidth;
+ final float coordY = y * planeHeight;
+
+ int lowIndexX = (int) Math.floor(coordX - 0.5f);
+ int lowIndexY = (int) Math.floor(coordY - 0.5f);
+ int highIndexX = lowIndexX + 1;
+ int highIndexY = lowIndexY + 1;
+
+ final float highWeightX = coordX - lowIndexX - 0.5f;
+ final float highWeightY = coordY - lowIndexY - 0.5f;
+ final float lowWeightX = 1f - highWeightX;
+ final float lowWeightY = 1f - highWeightY;
+
+ // Clamp on the edges.
+ lowIndexX = Math.max(0, lowIndexX);
+ lowIndexY = Math.max(0, lowIndexY);
+ highIndexX = Math.min(planeWidth - 1, highIndexX);
+ highIndexY = Math.min(planeHeight - 1, highIndexY);
+
+ float lowYValue = (plane.get(lowIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ + (plane.get(lowIndexY * stride + highIndexX) & 0xFF) * highWeightX;
+ float highYValue = (plane.get(highIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ + (plane.get(highIndexY * stride + highIndexX) & 0xFF) * highWeightX;
+
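+    // Example: for a 2x2 plane at (x, y) = (0.5, 0.5), the sample point falls
+    // exactly between all four texels and each weight product equals 0.25.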
+ return lowWeightY * lowYValue + highWeightY * highYValue;
+ }
+
+ private static byte saturatedFloatToByte(float c) {
+ return (byte) Math.round(255f * Math.max(0f, Math.min(1f, c)));
+ }
+
+ /**
+ * Converts test data YUV frame to expected RGBA frame. Tries to match the behavior of OpenGL
+ * YUV drawer shader. Does linear sampling on the U- and V-planes.
+ *
+ * @param yuvFrame Array of size 3 containing Y-, U-, V-planes for image of size
+ * (TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT). U- and V-planes should be half the size
+ * of the Y-plane.
+ */
+ private static byte[] convertYUVFrameToRGBA(ByteBuffer[] yuvFrame) {
+    final byte[] rgbaFrame = new byte[TEST_FRAME_WIDTH * TEST_FRAME_HEIGHT * 4];
+    final int rgbaStride = TEST_FRAME_WIDTH * 4;
+ final int yStride = TEST_FRAME_WIDTH;
+
+ for (int y = 0; y < TEST_FRAME_HEIGHT; y++) {
+ for (int x = 0; x < TEST_FRAME_WIDTH; x++) {
+ final float yC = ((yuvFrame[0].get(y * yStride + x) & 0xFF) - 16f) / 219f;
+ final float uC = (linearSample(yuvFrame[1], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
+ (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
+ - 16f)
+ / 224f
+ - 0.5f;
+ final float vC = (linearSample(yuvFrame[2], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
+ (x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
+ - 16f)
+ / 224f
+ - 0.5f;
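+        // Approximate BT.601 limited-range YUV-to-RGB conversion coefficients.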
+ final float rC = yC + 1.403f * vC;
+ final float gC = yC - 0.344f * uC - 0.714f * vC;
+ final float bC = yC + 1.77f * uC;
+
+        rgbaFrame[y * rgbaStride + x * 4 + 0] = saturatedFloatToByte(rC);
+        rgbaFrame[y * rgbaStride + x * 4 + 1] = saturatedFloatToByte(gC);
+        rgbaFrame[y * rgbaStride + x * 4 + 2] = saturatedFloatToByte(bC);
+        rgbaFrame[y * rgbaStride + x * 4 + 3] = (byte) 255;
+ }
+ }
+
+    return rgbaFrame;
+ }
+
+ /** Checks that the bitmap content matches the test frame with the given index. */
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ private static void checkBitmapContent(Bitmap bitmap, int frame) {
+ checkBitmap(bitmap, 1f);
+
+ byte[] expectedRGBA = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
+ ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
+ bitmap.copyPixelsToBuffer(bitmapBuffer);
+
+ for (int i = 0; i < expectedRGBA.length; i++) {
+ int expected = expectedRGBA[i] & 0xFF;
+ int value = bitmapBuffer.get(i) & 0xFF;
+      // Due to unknown conversion differences, check that the value matches within +-1.
+ if (Math.abs(value - expected) > 1) {
+ Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedRGBA));
+ Logging.d(TAG, "Bitmap content: " + Arrays.toString(bitmapBuffer.array()));
+ fail("Frame doesn't match original frame on byte " + i + ". Expected: " + expected
+ + " Result: " + value);
+ }
+ }
+ }
+
+ /** Tells eglRenderer to render test frame with given index. */
+ private void feedFrame(int i) {
+ final VideoFrame.I420Buffer buffer = JavaI420Buffer.wrap(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT,
+ TEST_FRAMES[i][0], TEST_FRAME_WIDTH, TEST_FRAMES[i][1], TEST_FRAME_WIDTH / 2,
+ TEST_FRAMES[i][2], TEST_FRAME_WIDTH / 2, null /* releaseCallback */);
+ final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestamp */);
+ eglRenderer.onFrame(frame);
+ frame.release();
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListener() throws Exception {
+ eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ assertNull(testFrameListener.resetAndGetBitmap());
+ eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ assertNull(testFrameListener.resetAndGetBitmap());
+ feedFrame(2);
+    // Check that we get no more than two bitmaps.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListenerBitmap() throws Exception {
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddFrameListenerBitmapScale() throws Exception {
+ for (int i = 0; i < 3; ++i) {
+ float scale = i * 0.5f + 0.5f;
+ eglRenderer.addFrameListener(testFrameListener, scale);
+ feedFrame(i);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmap(testFrameListener.resetAndGetBitmap(), scale);
+ }
+ }
+
+ /**
+ * Checks that the frame listener will not be called with a frame that was delivered before the
+ * frame listener was added.
+ */
+ @Test
+ @SmallTest
+ public void testFrameListenerNotCalledWithOldFrames() throws Exception {
+ feedFrame(0);
+ eglRenderer.addFrameListener(testFrameListener, 0f);
+    // Check that the old frame does not trigger the frame listener.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ /** Checks that the frame listener will not be called after it is removed. */
+ @Test
+ @SmallTest
+ public void testRemoveFrameListenerNotRacy() throws Exception {
+ for (int i = 0; i < REMOVE_FRAME_LISTENER_RACY_NUM_TESTS; i++) {
+ feedFrame(0);
+ eglRenderer.addFrameListener(testFrameListener, 0f);
+ eglRenderer.removeFrameListener(testFrameListener);
+ feedFrame(1);
+ }
+ // Check the frame listener hasn't triggered.
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ @Test
+ @SmallTest
+ public void testFrameListenersFpsReduction() throws Exception {
+ // Test that normal frame listeners receive frames while the renderer is paused.
+ eglRenderer.pauseVideo();
+ eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
+ feedFrame(0);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
+
+ // Test that frame listeners with FPS reduction applied receive frames while the renderer is not
+ // paused.
+ eglRenderer.disableFpsReduction();
+ eglRenderer.addFrameListener(
+ testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
+ feedFrame(1);
+ assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
+
+ // Test that frame listeners with FPS reduction applied will not receive frames while the
+ // renderer is paused.
+ eglRenderer.pauseVideo();
+ eglRenderer.addFrameListener(
+ testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
+ feedFrame(1);
+ assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
+ }
+
+ private static ByteBuffer[][] copyTestDataToDirectByteBuffers(byte[][][] testData) {
+ final ByteBuffer[][] result = new ByteBuffer[testData.length][];
+
+ for (int i = 0; i < testData.length; i++) {
+ result[i] = new ByteBuffer[testData[i].length];
+ for (int j = 0; j < testData[i].length; j++) {
+ result[i][j] = ByteBuffer.allocateDirect(testData[i][j].length);
+ result[i][j].put(testData[i][j]);
+ result[i][j].rewind();
+ }
+ }
+ return result;
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java
new file mode 100644
index 0000000000..8584ddf464
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import android.os.Environment;
+import androidx.test.filters.SmallTest;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import org.junit.Before;
+import org.junit.Test;
+
+public class FileVideoCapturerTest {
+ public static class MockCapturerObserver implements CapturerObserver {
+ private final ArrayList<VideoFrame> frames = new ArrayList<VideoFrame>();
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ assertTrue(success);
+ }
+
+ @Override
+ public void onCapturerStopped() {
+ // Empty on purpose.
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrameCaptured(VideoFrame frame) {
+ frame.retain();
+ frames.add(frame);
+ notify();
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized ArrayList<VideoFrame> getMinimumFramesBlocking(int minFrames)
+ throws InterruptedException {
+ while (frames.size() < minFrames) {
+ wait();
+ }
+ return new ArrayList<VideoFrame>(frames);
+ }
+ }
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
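+  // The test .y4m file holds 4x4-pixel frames, so each frame's Y (16 bytes)
+  // plus U and V (4 bytes each) payload is exactly 24 bytes, authored as a
+  // readable ASCII string (the "THRID" spelling matches the file's bytes).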
+ @Test
+ @SmallTest
+ public void testVideoCaptureFromFile() throws InterruptedException, IOException {
+ final int FRAME_WIDTH = 4;
+ final int FRAME_HEIGHT = 4;
+ final int FRAME_CHROMA_WIDTH = (FRAME_WIDTH + 1) / 2;
+ final int FRAME_CHROMA_HEIGHT = (FRAME_HEIGHT + 1) / 2;
+ final int FRAME_SIZE_Y = FRAME_WIDTH * FRAME_HEIGHT;
+ final int FRAME_SIZE_CHROMA = FRAME_CHROMA_WIDTH * FRAME_CHROMA_HEIGHT;
+
+ final FileVideoCapturer fileVideoCapturer =
+ new FileVideoCapturer(Environment.getExternalStorageDirectory().getPath()
+ + "/chromium_tests_root/sdk/android/instrumentationtests/src/org/webrtc/"
+ + "capturetestvideo.y4m");
+ final MockCapturerObserver capturerObserver = new MockCapturerObserver();
+ fileVideoCapturer.initialize(
+ null /* surfaceTextureHelper */, null /* applicationContext */, capturerObserver);
+ fileVideoCapturer.startCapture(FRAME_WIDTH, FRAME_HEIGHT, 33 /* fps */);
+
+ final String[] expectedFrames = {
+ "THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
+
+ final ArrayList<VideoFrame> frames =
+ capturerObserver.getMinimumFramesBlocking(expectedFrames.length);
+ assertEquals(expectedFrames.length, frames.size());
+
+ fileVideoCapturer.stopCapture();
+ fileVideoCapturer.dispose();
+
+ // Check the content of the frames.
+ for (int i = 0; i < expectedFrames.length; ++i) {
+ final VideoFrame frame = frames.get(i);
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+ assertTrue(buffer instanceof VideoFrame.I420Buffer);
+ final VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
+
+ assertEquals(FRAME_WIDTH, i420Buffer.getWidth());
+ assertEquals(FRAME_HEIGHT, i420Buffer.getHeight());
+
+ final ByteBuffer dataY = i420Buffer.getDataY();
+ final ByteBuffer dataU = i420Buffer.getDataU();
+ final ByteBuffer dataV = i420Buffer.getDataV();
+
+ assertEquals(FRAME_SIZE_Y, dataY.remaining());
+ assertEquals(FRAME_SIZE_CHROMA, dataU.remaining());
+ assertEquals(FRAME_SIZE_CHROMA, dataV.remaining());
+
+ ByteBuffer frameContents = ByteBuffer.allocate(FRAME_SIZE_Y + 2 * FRAME_SIZE_CHROMA);
+ frameContents.put(dataY);
+ frameContents.put(dataU);
+ frameContents.put(dataV);
+ frameContents.rewind(); // Move back to the beginning.
+
+ assertByteBufferContents(
+ expectedFrames[i].getBytes(Charset.forName("US-ASCII")), frameContents);
+ frame.release();
+ }
+ }
+
+ private static void assertByteBufferContents(byte[] expected, ByteBuffer actual) {
+ assertEquals("Unexpected ByteBuffer size.", expected.length, actual.remaining());
+ for (int i = 0; i < expected.length; i++) {
+ assertEquals("Unexpected byte at index: " + i, expected[i], actual.get());
+ }
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
new file mode 100644
index 0000000000..4cee3bdf71
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/GlRectDrawerTest.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.opengl.GLES20;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.Random;
+import org.junit.Test;
+
+public class GlRectDrawerTest {
+ // Resolution of the test image.
+ private static final int WIDTH = 16;
+ private static final int HEIGHT = 16;
+ // Seed for random pixel creation.
+ private static final int SEED = 42;
+ // When comparing pixels, allow some slack for float arithmetic and integer rounding.
+ private static final float MAX_DIFF = 1.5f;
+
+ // clang-format off
+ private static final float[] IDENTITY_MATRIX = {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1};
+ // clang-format on
+
+ private static float normalizedByte(byte b) {
+ return (b & 0xFF) / 255.0f;
+ }
+
+ private static float saturatedConvert(float c) {
+ return 255.0f * Math.max(0, Math.min(c, 1));
+ }
+
+ // Assert RGB ByteBuffers are pixel perfect identical.
+ private static void assertByteBufferEquals(
+ int width, int height, ByteBuffer actual, ByteBuffer expected) {
+ actual.rewind();
+ expected.rewind();
+ assertEquals(actual.remaining(), width * height * 3);
+ assertEquals(expected.remaining(), width * height * 3);
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int actualR = actual.get() & 0xFF;
+ final int actualG = actual.get() & 0xFF;
+ final int actualB = actual.get() & 0xFF;
+ final int expectedR = expected.get() & 0xFF;
+ final int expectedG = expected.get() & 0xFF;
+ final int expectedB = expected.get() & 0xFF;
+ if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
+ fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+ + "(" + x + ", " + y + "). Expected color (R,G,B): "
+ + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+ + " but was: "
+ + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+ }
+ }
+ }
+ }
+
+ // Convert RGBA ByteBuffer to RGB ByteBuffer.
+ private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
+ rgbaBuffer.rewind();
+ assertEquals(rgbaBuffer.remaining() % 4, 0);
+ final int numberOfPixels = rgbaBuffer.remaining() / 4;
+ final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
+ while (rgbaBuffer.hasRemaining()) {
+ // Copy RGB.
+ for (int channel = 0; channel < 3; ++channel) {
+ rgbBuffer.put(rgbaBuffer.get());
+ }
+ // Drop alpha.
+ rgbaBuffer.get();
+ }
+ return rgbBuffer;
+ }
+
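+  // Render-and-readback pattern used by the tests below: draw into an offscreen
+  // pbuffer surface, then read the result back with glReadPixels() for
+  // byte-exact comparison against the source data.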
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @SmallTest
+ public void testRgbRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Upload the RGB byte buffer data as a texture.
+ final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+ GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+
+ // Draw the RGB frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawRgb(rgbTexture, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */, 0 /* viewportY */,
+ WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @SmallTest
+ public void testYuvRendering() {
+ // Create EGL base with a pixel buffer as display output.
+ EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+ eglBase.makeCurrent();
+
+ // Create YUV byte buffer planes with random content.
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ final Random random = new Random(SEED);
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
+ random.nextBytes(yuvPlanes[i].array());
+ }
+
+ // Generate 3 texture ids for Y/U/V.
+ final int yuvTextures[] = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ // Upload the YUV byte buffer data as textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
+ GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+ GlUtil.checkNoGLES2Error("glTexImage2D");
+ }
+
+ // Draw the YUV frame onto the pixel buffer.
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawYuv(yuvTextures, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */,
+ 0 /* viewportY */, WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Compare the YUV data with the RGBA result.
+ for (int y = 0; y < HEIGHT; ++y) {
+ for (int x = 0; x < WIDTH; ++x) {
+ // YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
+ // fragment shader code in GlGenericDrawer.
+ final float y_luma = normalizedByte(yuvPlanes[0].get());
+ final float u_chroma = normalizedByte(yuvPlanes[1].get());
+ final float v_chroma = normalizedByte(yuvPlanes[2].get());
+ // Expected color in unrounded RGB [0.0f, 255.0f].
+ final float expectedRed =
+ saturatedConvert(1.16438f * y_luma + 1.59603f * v_chroma - 0.874202f);
+ final float expectedGreen = saturatedConvert(
+ 1.16438f * y_luma - 0.391762f * u_chroma - 0.812968f * v_chroma + 0.531668f);
+ final float expectedBlue =
+ saturatedConvert(1.16438f * y_luma + 2.01723f * u_chroma - 1.08563f);
+
+ // Actual color in RGB8888.
+ final int actualRed = data.get() & 0xFF;
+ final int actualGreen = data.get() & 0xFF;
+ final int actualBlue = data.get() & 0xFF;
+ final int actualAlpha = data.get() & 0xFF;
+
+ // Assert rendered image is close to pixel perfect from source YUV.
+ assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
+ assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
+ assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
+ assertEquals(actualAlpha, 255);
+ }
+ }
+
+ drawer.release();
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ eglBase.release();
+ }
+
+ /**
+ * The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
+ * create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
+ * with creating OES textures in the following way:
+ * - Create SurfaceTexture with help from SurfaceTextureHelper.
+ * - Create an EglBase with the SurfaceTexture as EGLSurface.
+ * - Upload RGB texture with known content.
+ * - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
+ * - Wait for an OES texture to be produced.
+ * The actual oesDraw() test is this:
+ * - Create an EglBase with a pixel buffer as target.
+ * - Render the OES texture onto the pixel buffer.
+ * - Read back the pixel buffer and compare it with the known RGB data.
+ */
+ // TODO(titovartem) make correct fix during webrtc:9175
+ @SuppressWarnings("ByteBufferBackingArray")
+ @Test
+ @MediumTest
+ public void testOesRendering() throws InterruptedException {
+ /**
+ * Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
+ */
+ class StubOesTextureProducer {
+ private final EglBase eglBase;
+ private final GlRectDrawer drawer;
+ private final int rgbTexture;
+
+ public StubOesTextureProducer(EglBase.Context sharedContext,
+ SurfaceTextureHelper surfaceTextureHelper, int width, int height) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
+ surfaceTextureHelper.setTextureSize(width, height);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ drawer = new GlRectDrawer();
+
+ eglBase.makeCurrent();
+ rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+
+ public void draw(ByteBuffer rgbPlane) {
+ eglBase.makeCurrent();
+
+ // Upload RGB data to texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
+ GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+ // Draw the RGB data onto the SurfaceTexture.
+ drawer.drawRgb(rgbTexture, IDENTITY_MATRIX, WIDTH, HEIGHT, 0 /* viewportX */,
+ 0 /* viewportY */, WIDTH, HEIGHT);
+ eglBase.swapBuffers();
+ }
+
+ public void release() {
+ eglBase.makeCurrent();
+ drawer.release();
+ GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+ eglBase.release();
+ }
+ }
+
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(WIDTH, HEIGHT);
+
+ // Create resources for generating OES textures.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
+ eglBase.getEglBaseContext(), surfaceTextureHelper, WIDTH, HEIGHT);
+ final SurfaceTextureHelperTest.MockTextureListener listener =
+ new SurfaceTextureHelperTest.MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+
+ // Create RGB byte buffer plane with random content.
+ final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+ final Random random = new Random(SEED);
+ random.nextBytes(rgbPlane.array());
+
+ // Draw the frame and block until an OES texture is delivered.
+ oesProducer.draw(rgbPlane);
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+
+ // Real test starts here.
+ // Draw the OES texture on the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ WIDTH, HEIGHT, 0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+ GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is pixel perfect to source RGB.
+ assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+ drawer.release();
+ textureBuffer.release();
+ oesProducer.release();
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
new file mode 100644
index 0000000000..092d617270
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java
@@ -0,0 +1,507 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
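+/**
+ * Tests for encoders created by HardwareVideoEncoderFactory, parameterized over the input buffer
+ * type (I420 vs. OES texture) and whether an EGL context is provided to the factory.
+ */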
+@RunWith(Parameterized.class)
+public class HardwareVideoEncoderTest {
+ @Parameters(name = "textures={0};eglContext={1}")
+ public static Collection<Object[]> parameters() {
+ return Arrays.asList(new Object[] {/*textures=*/false, /*eglContext=*/false},
+ new Object[] {/*textures=*/true, /*eglContext=*/false},
+ new Object[] {/*textures=*/true, /*eglContext=*/true});
+ }
+
+ private final boolean useTextures;
+ private final boolean useEglContext;
+
+ public HardwareVideoEncoderTest(boolean useTextures, boolean useEglContext) {
+ this.useTextures = useTextures;
+ this.useEglContext = useEglContext;
+ }
+
+  private static final String TAG = "HwVideoEncoderTest";
+
+ private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
+ private static final boolean ENABLE_H264_HIGH_PROFILE = true;
+ private static final VideoEncoder.Settings SETTINGS =
+ new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
+ 30 /* fps */, 1 /* numberOfSimulcastStreams */, true /* automaticResizeOn */,
+ /* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
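+  // Hardware encoding is asynchronous: poll() waits up to ENCODE_TIMEOUT_MS for an encoded
+  // frame, and encode() is retried up to NUM_ENCODE_TRIES times while the codec reports
+  // NO_OUTPUT.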
+ private static final int ENCODE_TIMEOUT_MS = 1000;
+ private static final int NUM_TEST_FRAMES = 10;
+ private static final int NUM_ENCODE_TRIES = 100;
+ private static final int ENCODE_RETRY_SLEEP_MS = 1;
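+  // MediaCodec implementations typically require input dimensions aligned to 16 pixels; the
+  // alignment tests below depend on this value.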
+ private static final int PIXEL_ALIGNMENT_REQUIRED = 16;
+ private static final boolean APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS = false;
+
+ // # Mock classes
+ /**
+ * Mock encoder callback that allows easy verification of the general properties of the encoded
+ * frame such as width and height. Also used from AndroidVideoDecoderInstrumentationTest.
+ */
+ static class MockEncoderCallback implements VideoEncoder.Callback {
+ private BlockingQueue<EncodedImage> frameQueue = new LinkedBlockingQueue<>();
+
+ @Override
+ public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
+ assertNotNull(frame);
+ assertNotNull(info);
+
+ // Make a copy because keeping a reference to the buffer is not allowed.
+ final ByteBuffer bufferCopy = ByteBuffer.allocateDirect(frame.buffer.remaining());
+ bufferCopy.put(frame.buffer);
+ bufferCopy.rewind();
+
+ frameQueue.offer(EncodedImage.builder()
+ .setBuffer(bufferCopy, null)
+ .setEncodedWidth(frame.encodedWidth)
+ .setEncodedHeight(frame.encodedHeight)
+ .setCaptureTimeNs(frame.captureTimeNs)
+ .setFrameType(frame.frameType)
+ .setRotation(frame.rotation)
+ .setQp(frame.qp)
+ .createEncodedImage());
+ }
+
+ public EncodedImage poll() {
+ try {
+ EncodedImage image = frameQueue.poll(ENCODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ assertNotNull("Timed out waiting for the frame to be encoded.", image);
+ return image;
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void assertFrameEncoded(VideoFrame frame) {
+ final VideoFrame.Buffer buffer = frame.getBuffer();
+ final EncodedImage image = poll();
+ assertTrue(image.buffer.capacity() > 0);
+ assertEquals(image.encodedWidth, buffer.getWidth());
+ assertEquals(image.encodedHeight, buffer.getHeight());
+ assertEquals(image.captureTimeNs, frame.getTimestampNs());
+ assertEquals(image.rotation, frame.getRotation());
+ }
+ }
+
+ /** A common base class for the texture and I420 buffer that implements reference counting. */
+ private static abstract class MockBufferBase implements VideoFrame.Buffer {
+ protected final int width;
+ protected final int height;
+ private final Runnable releaseCallback;
+ private final Object refCountLock = new Object();
+ private int refCount = 1;
+
+ public MockBufferBase(int width, int height, Runnable releaseCallback) {
+ this.width = width;
+ this.height = height;
+ this.releaseCallback = releaseCallback;
+ }
+
+ @Override
+ public int getWidth() {
+ return width;
+ }
+
+ @Override
+ public int getHeight() {
+ return height;
+ }
+
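+    // retain() and release() implement the VideoFrame.Buffer reference-counting contract: the
+    // buffer starts with one reference, and releaseCallback runs when the count drops to zero.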
+ @Override
+ public void retain() {
+ synchronized (refCountLock) {
+ assertTrue("Buffer retained after being destroyed.", refCount > 0);
+ ++refCount;
+ }
+ }
+
+ @Override
+ public void release() {
+ synchronized (refCountLock) {
+ assertTrue("Buffer released too many times.", --refCount >= 0);
+ if (refCount == 0) {
+ releaseCallback.run();
+ }
+ }
+ }
+ }
+
+ private static class MockTextureBuffer
+ extends MockBufferBase implements VideoFrame.TextureBuffer {
+ private final int textureId;
+
+ public MockTextureBuffer(int textureId, int width, int height, Runnable releaseCallback) {
+ super(width, height, releaseCallback);
+ this.textureId = textureId;
+ }
+
+ @Override
+ public VideoFrame.TextureBuffer.Type getType() {
+ return VideoFrame.TextureBuffer.Type.OES;
+ }
+
+ @Override
+ public int getTextureId() {
+ return textureId;
+ }
+
+ @Override
+ public Matrix getTransformMatrix() {
+ return new Matrix();
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ return JavaI420Buffer.allocate(width, height);
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ retain();
+      return new MockTextureBuffer(textureId, scaleWidth, scaleHeight, this::release);
+ }
+ }
+
+ private static class MockI420Buffer extends MockBufferBase implements VideoFrame.I420Buffer {
+ private final JavaI420Buffer realBuffer;
+
+ public MockI420Buffer(int width, int height, Runnable releaseCallback) {
+ super(width, height, releaseCallback);
+ realBuffer = JavaI420Buffer.allocate(width, height);
+ }
+
+ @Override
+ public ByteBuffer getDataY() {
+ return realBuffer.getDataY();
+ }
+
+ @Override
+ public ByteBuffer getDataU() {
+ return realBuffer.getDataU();
+ }
+
+ @Override
+ public ByteBuffer getDataV() {
+ return realBuffer.getDataV();
+ }
+
+ @Override
+ public int getStrideY() {
+ return realBuffer.getStrideY();
+ }
+
+ @Override
+ public int getStrideU() {
+ return realBuffer.getStrideU();
+ }
+
+ @Override
+ public int getStrideV() {
+ return realBuffer.getStrideV();
+ }
+
+ @Override
+ public VideoFrame.I420Buffer toI420() {
+ retain();
+ return this;
+ }
+
+ @Override
+ public void retain() {
+ super.retain();
+ realBuffer.retain();
+ }
+
+ @Override
+ public void release() {
+ super.release();
+ realBuffer.release();
+ }
+
+ @Override
+ public VideoFrame.Buffer cropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ return realBuffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ }
+ }
+
+ // # Test fields
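+  // Tracks outstanding generated frames: generateFrame() increments the count and
+  // releaseFrameCallback decrements it once a frame's buffer is fully released, so tearDown()
+  // can assert that no frames leaked.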
+ private final Object referencedFramesLock = new Object();
+ private int referencedFrames;
+
+ private Runnable releaseFrameCallback = new Runnable() {
+ @Override
+ public void run() {
+ synchronized (referencedFramesLock) {
+ --referencedFrames;
+ }
+ }
+ };
+
+ private EglBase14 eglBase;
+ private long lastTimestampNs;
+
+ // # Helper methods
+ private VideoEncoderFactory createEncoderFactory(EglBase.Context eglContext) {
+ return new HardwareVideoEncoderFactory(
+ eglContext, ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+ }
+
+ private @Nullable VideoEncoder createEncoder() {
+ VideoEncoderFactory factory =
+ createEncoderFactory(useEglContext ? eglBase.getEglBaseContext() : null);
+ VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
+ return factory.createEncoder(supportedCodecs[0]);
+ }
+
+ private VideoFrame generateI420Frame(int width, int height) {
+ synchronized (referencedFramesLock) {
+ ++referencedFrames;
+ }
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+ VideoFrame.Buffer buffer = new MockI420Buffer(width, height, releaseFrameCallback);
+ return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+ }
+
+ private VideoFrame generateTextureFrame(int width, int height) {
+ synchronized (referencedFramesLock) {
+ ++referencedFrames;
+ }
+ final int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+ VideoFrame.Buffer buffer =
+ new MockTextureBuffer(textureId, width, height, releaseFrameCallback);
+ return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+ }
+
+ private VideoFrame generateFrame(int width, int height) {
+ return useTextures ? generateTextureFrame(width, height) : generateI420Frame(width, height);
+ }
+
+ static VideoCodecStatus testEncodeFrame(
+ VideoEncoder encoder, VideoFrame frame, VideoEncoder.EncodeInfo info) {
+ int numTries = 0;
+
+    // It takes a while for the encoder to become ready, so retry until it accepts the frame.
+ while (true) {
+ ++numTries;
+
+ final VideoCodecStatus returnValue = encoder.encode(frame, info);
+ switch (returnValue) {
+ case OK: // Success
+ // Fall through
+ case ERR_SIZE: // Wrong size
+ return returnValue;
+ case NO_OUTPUT:
+ if (numTries >= NUM_ENCODE_TRIES) {
+ fail("encoder.encode keeps returning NO_OUTPUT");
+ }
+ try {
+ Thread.sleep(ENCODE_RETRY_SLEEP_MS); // Try again.
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ break;
+ default:
+ fail("encoder.encode returned: " + returnValue); // Error
+ }
+ }
+ }
+
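+  // Rounds `number` down to the nearest multiple of `alignment`, e.g. 120 aligned to 16 is 112.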
+ private static int getAlignedNumber(int number, int alignment) {
+ return (number / alignment) * alignment;
+ }
+
+ public static int getPixelAlignmentRequired() {
+ return PIXEL_ALIGNMENT_REQUIRED;
+ }
+
+ // # Tests
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+
+ eglBase = EglBase.createEgl14(EglBase.CONFIG_PLAIN);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ lastTimestampNs = System.nanoTime();
+ }
+
+ @After
+ public void tearDown() {
+ eglBase.release();
+ synchronized (referencedFramesLock) {
+ assertEquals("All frames were not released", 0, referencedFrames);
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testInitialize() {
+ VideoEncoder encoder = createEncoder();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncode() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+ Log.d(TAG, "Test frame: " + i);
+ VideoFrame frame = generateFrame(SETTINGS.width, SETTINGS.height);
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+ testEncodeFrame(encoder, frame, info);
+
+ callback.assertFrameEncoded(frame);
+ frame.release();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+  public void testEncodeAlternatingBuffers() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+ Log.d(TAG, "Test frame: " + i);
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateTextureFrame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ frame = generateI420Frame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+ }
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncodeDifferentSizes() {
+ VideoEncoder encoder = createEncoder();
+ MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ frame = generateFrame(SETTINGS.width, SETTINGS.height);
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+    // Android MediaCodec only guarantees proper operation with 16-pixel-aligned input frames.
+    // Force the input frame dimensions to the greatest multiples of 16 below the original sizes.
+ frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
+ getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
+ testEncodeFrame(encoder, frame, info);
+ callback.assertFrameEncoded(frame);
+ frame.release();
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testEncodeAlignmentCheck() {
+ VideoEncoder encoder = createEncoder();
+    MockEncoderCallback callback = new MockEncoderCallback();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ VideoFrame frame;
+ VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+ new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+ frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
+ assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+    // Android MediaCodec only guarantees proper operation with 16-pixel-aligned input frames.
+    // Encoding a subsequent frame with non-aligned dimensions should return ERR_SIZE.
+ frame = generateFrame(SETTINGS.width / 4, SETTINGS.height / 4);
+ assertNotEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+    // Since the encoder has returned an error, reinitialize it before encoding further frames.
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+ frame = generateFrame(getAlignedNumber(SETTINGS.width / 4, PIXEL_ALIGNMENT_REQUIRED),
+ getAlignedNumber(SETTINGS.height / 4, PIXEL_ALIGNMENT_REQUIRED));
+ assertEquals(VideoCodecStatus.OK, testEncodeFrame(encoder, frame, info));
+ frame.release();
+
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+
+ @Test
+ @SmallTest
+ public void testGetEncoderInfo() {
+ VideoEncoder encoder = createEncoder();
+ assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+ VideoEncoder.EncoderInfo info = encoder.getEncoderInfo();
+ assertEquals(PIXEL_ALIGNMENT_REQUIRED, info.getRequestedResolutionAlignment());
+ assertEquals(
+ APPLY_ALIGNMENT_TO_ALL_SIMULCAST_LAYERS, info.getApplyAlignmentToAllSimulcastLayers());
+ assertEquals(VideoCodecStatus.OK, encoder.release());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java
new file mode 100644
index 0000000000..780eeb6197
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/LoggableTest.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import org.junit.Test;
+import org.webrtc.Loggable;
+import org.webrtc.Logging.Severity;
+import org.webrtc.PeerConnectionFactory;
+
+public class LoggableTest {
+  private static final String TAG = "LoggableTest";
+  private static final String NATIVE_FILENAME_TAG = "loggable_test.cc";
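+  // Messages logged from native code are tagged with the C++ source file name, so the native
+  // tests below expect NATIVE_FILENAME_TAG rather than the Java TAG.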
+
+ private static class MockLoggable implements Loggable {
+ private ArrayList<String> messages = new ArrayList<>();
+ private ArrayList<Severity> sevs = new ArrayList<>();
+ private ArrayList<String> tags = new ArrayList<>();
+
+ @Override
+ public void onLogMessage(String message, Severity sev, String tag) {
+ messages.add(message);
+ sevs.add(sev);
+ tags.add(tag);
+ }
+
+ public boolean isMessageReceived(String message) {
+ for (int i = 0; i < messages.size(); i++) {
+ if (messages.get(i).contains(message)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public boolean isMessageReceived(String message, Severity sev, String tag) {
+ for (int i = 0; i < messages.size(); i++) {
+ if (messages.get(i).contains(message) && sevs.get(i) == sev && tags.get(i).equals(tag)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ private final MockLoggable mockLoggable = new MockLoggable();
+
+ @Test
+ @SmallTest
+ public void testLoggableSetWithoutError() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @Test
+ @SmallTest
+ public void testMessageIsLoggedCorrectly() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should be logged";
+ Logging.d(TAG, msg);
+ assertTrue(mockLoggable.isMessageReceived(msg, Severity.LS_INFO, TAG));
+ }
+
+ @Test
+ @SmallTest
+ public void testLowSeverityIsFiltered() throws InterruptedException {
+ // Set severity to LS_WARNING to filter out LS_INFO and below.
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_WARNING)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ Logging.d(TAG, msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testLoggableDoesNotReceiveMessagesAfterUnsetting() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ // Reinitialize without Loggable
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ Logging.d(TAG, msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeMessageIsLoggedCorrectly() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should be logged";
+ nativeLogInfoTestMessage(msg);
+ assertTrue(mockLoggable.isMessageReceived(msg, Severity.LS_INFO, NATIVE_FILENAME_TAG));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeLowSeverityIsFiltered() throws InterruptedException {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_WARNING)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ nativeLogInfoTestMessage(msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ @Test
+ @SmallTest
+ public void testNativeLoggableDoesNotReceiveMessagesAfterUnsetting() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setInjectableLogger(mockLoggable, Severity.LS_INFO)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ // Reinitialize without Loggable
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ String msg = "Message that should NOT be logged";
+ nativeLogInfoTestMessage(msg);
+ assertFalse(mockLoggable.isMessageReceived(msg));
+ }
+
+ private static native void nativeLogInfoTestMessage(String message);
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
new file mode 100644
index 0000000000..b646f1f4eb
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/NetworkMonitorTest.java
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.CALLS_REAL_METHODS;
+import static org.mockito.Mockito.mock;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.Intent;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkRequest;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.webrtc.NetworkChangeDetector.ConnectionType;
+import org.webrtc.NetworkChangeDetector.NetworkInformation;
+import org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
+import org.webrtc.NetworkMonitorAutoDetect.NetworkState;
+import org.webrtc.NetworkMonitorAutoDetect.SimpleNetworkCallback;
+
+/**
+ * Tests for org.webrtc.NetworkMonitor.
+ *
+ * TODO(deadbeef): These tests don't cover the interaction between
+ * NetworkManager.java and androidnetworkmonitor.cc, which is how this
+ * class is used in practice in WebRTC.
+ */
+@SuppressLint("NewApi")
+public class NetworkMonitorTest {
+ private static final long INVALID_NET_ID = -1;
+ private NetworkChangeDetector detector;
+ private String fieldTrialsString = "";
+
+ /**
+ * Listens for alerts fired by the NetworkMonitor when network status changes.
+ */
+ private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
+ private boolean receivedNotification;
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType connectionType) {
+ receivedNotification = true;
+ }
+
+ public boolean hasReceivedNotification() {
+ return receivedNotification;
+ }
+
+ public void resetHasReceivedNotification() {
+ receivedNotification = false;
+ }
+ }
+
+ /**
+ * Mocks out calls to the ConnectivityManager.
+ */
+ private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
+ private boolean activeNetworkExists;
+ private int networkType;
+ private int networkSubtype;
+ private int underlyingNetworkTypeForVpn;
+ private int underlyingNetworkSubtypeForVpn;
+
+ MockConnectivityManagerDelegate() {
+ this(new HashSet<>(), "");
+ }
+
+ MockConnectivityManagerDelegate(Set<Network> availableNetworks, String fieldTrialsString) {
+ super((ConnectivityManager) null, availableNetworks, fieldTrialsString);
+ }
+
+ @Override
+ public NetworkState getNetworkState() {
+ return new NetworkState(activeNetworkExists, networkType, networkSubtype,
+ underlyingNetworkTypeForVpn, underlyingNetworkSubtypeForVpn);
+ }
+
+ // Dummy implementations to avoid NullPointerExceptions in default implementations:
+
+ @Override
+ public long getDefaultNetId() {
+ return INVALID_NET_ID;
+ }
+
+ @Override
+ public Network[] getAllNetworks() {
+ return new Network[0];
+ }
+
+ @Override
+ public NetworkState getNetworkState(Network network) {
+ return new NetworkState(false, -1, -1, -1, -1);
+ }
+
+ public void setActiveNetworkExists(boolean networkExists) {
+ activeNetworkExists = networkExists;
+ }
+
+ public void setNetworkType(int networkType) {
+ this.networkType = networkType;
+ }
+
+ public void setNetworkSubtype(int networkSubtype) {
+ this.networkSubtype = networkSubtype;
+ }
+
+ public void setUnderlyingNetworkType(int underlyingNetworkTypeForVpn) {
+ this.underlyingNetworkTypeForVpn = underlyingNetworkTypeForVpn;
+ }
+
+    public void setUnderlyingNetworkSubtype(int underlyingNetworkSubtypeForVpn) {
+ this.underlyingNetworkSubtypeForVpn = underlyingNetworkSubtypeForVpn;
+ }
+ }
+
+ /**
+ * Mocks out calls to the WifiManager.
+ */
+ private static class MockWifiManagerDelegate
+ extends NetworkMonitorAutoDetect.WifiManagerDelegate {
+ private String wifiSSID;
+
+ @Override
+ public String getWifiSSID() {
+ return wifiSSID;
+ }
+
+ public void setWifiSSID(String wifiSSID) {
+ this.wifiSSID = wifiSSID;
+ }
+ }
+
+ // A dummy NetworkMonitorAutoDetect.Observer.
+ private static class TestNetworkMonitorAutoDetectObserver
+ extends NetworkMonitorAutoDetect.Observer {
+ final String fieldTrialsString;
+
+ TestNetworkMonitorAutoDetectObserver(String fieldTrialsString) {
+ this.fieldTrialsString = fieldTrialsString;
+ }
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
+
+ @Override
+ public void onNetworkConnect(NetworkInformation networkInfo) {}
+
+ @Override
+ public void onNetworkDisconnect(long networkHandle) {}
+
+ @Override
+ public void onNetworkPreference(List<ConnectionType> types, @NetworkPreference int preference) {
+ }
+
+ // @Override
+ // public String getFieldTrialsString() {
+ // return fieldTrialsString;
+ // }
+ }
+
+ private NetworkMonitorAutoDetect receiver;
+ private MockConnectivityManagerDelegate connectivityDelegate;
+ private MockWifiManagerDelegate wifiDelegate;
+
+ /**
+ * Helper method to create a network monitor and delegates for testing.
+ */
+ private void createTestMonitor() {
+ Context context = InstrumentationRegistry.getTargetContext();
+
+ NetworkMonitor.getInstance().setNetworkChangeDetectorFactory(
+ new NetworkChangeDetectorFactory() {
+ @Override
+ public NetworkChangeDetector create(
+ NetworkChangeDetector.Observer observer, Context context) {
+ detector = new NetworkMonitorAutoDetect(observer, context);
+ return detector;
+ }
+ });
+
+ receiver = NetworkMonitor.createAndSetAutoDetectForTest(context, fieldTrialsString);
+ assertNotNull(receiver);
+
+ connectivityDelegate = new MockConnectivityManagerDelegate();
+ connectivityDelegate.setActiveNetworkExists(true);
+ receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
+
+ wifiDelegate = new MockWifiManagerDelegate();
+ receiver.setWifiManagerDelegateForTests(wifiDelegate);
+ wifiDelegate.setWifiSSID("foo");
+ }
+
+ private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
+ final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
+ return NetworkMonitorAutoDetect.getConnectionType(networkState);
+ }
+
+ @Before
+ public void setUp() {
+ ContextUtils.initialize(InstrumentationRegistry.getTargetContext());
+ createTestMonitor();
+ }
+
+ /**
+ * Tests that the receiver registers for connectivity intents during construction.
+ */
+ @Test
+ @SmallTest
+ public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
+ Context context = InstrumentationRegistry.getTargetContext();
+
+ NetworkMonitorAutoDetect.Observer observer =
+ new TestNetworkMonitorAutoDetectObserver(fieldTrialsString);
+
+ NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
+
+ assertTrue(receiver.isReceiverRegisteredForTesting());
+ }
+
+ /**
+ * Tests that when there is an intent indicating a change in network connectivity, it sends a
+ * notification to Java observers.
+ */
+ @Test
+ @MediumTest
+ public void testNetworkMonitorJavaObservers() throws InterruptedException {
+ // Initialize the NetworkMonitor with a connection.
+ Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+
+ // We shouldn't be re-notified if the connection hasn't actually changed.
+ NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
+ NetworkMonitor.addNetworkObserver(observer);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("bar");
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // We should be notified when we change to Wifi.
+ connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We should be notified when the Wifi SSID changes.
+ wifiDelegate.setWifiSSID("foo");
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ observer.resetHasReceivedNotification();
+
+ // We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
+ assertFalse(observer.hasReceivedNotification());
+
+ // Mimic that connectivity has been lost and ensure that the observer gets the notification.
+ connectivityDelegate.setActiveNetworkExists(false);
+ Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+ receiver.onReceive(InstrumentationRegistry.getTargetContext(), noConnectivityIntent);
+ assertTrue(observer.hasReceivedNotification());
+ }
+
+ /**
+ * Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
+ * active network connections so it cannot usefully check results, but it can at least check
+ * that the functions don't crash.
+ */
+ @Test
+ @SmallTest
+ public void testConnectivityManagerDelegateDoesNotCrash() {
+ ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
+ InstrumentationRegistry.getTargetContext(), new HashSet<>(), fieldTrialsString);
+ delegate.getNetworkState();
+ Network[] networks = delegate.getAllNetworks();
+ if (networks.length >= 1) {
+ delegate.getNetworkState(networks[0]);
+ delegate.hasInternetCapability(networks[0]);
+ }
+ delegate.getDefaultNetId();
+ }
+
+ /** Tests that ConnectivityManagerDelegate preferentially reads from the cache */
+ @Test
+ @SmallTest
+ public void testConnectivityManagerDelegatePreferentiallyReadsFromCache() {
+ final Set<Network> availableNetworks = new HashSet<>();
+ ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(
+ (ConnectivityManager) InstrumentationRegistry.getTargetContext().getSystemService(
+ Context.CONNECTIVITY_SERVICE),
+ availableNetworks, "getAllNetworksFromCache:true");
+
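+    // With getAllNetworksFromCache enabled, getAllNetworks() is served from the injected
+    // availableNetworks set instead of querying the ConnectivityManager.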
+ Network[] networks = delegate.getAllNetworks();
+    assertEquals(0, networks.length);
+
+ final Network mockNetwork = mock(Network.class);
+ availableNetworks.add(mockNetwork);
+
+ assertArrayEquals(new Network[] {mockNetwork}, delegate.getAllNetworks());
+ }
+
+  // Tests field trial parsing.
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_requestVPN_disabled() {
+ NetworkRequest request =
+ getNetworkRequestForFieldTrials("anyothertext,requestVPN:false,anyothertext");
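+    // The surrounding "anyothertext" entries check that unrelated field trial tokens are
+    // ignored.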
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_requestVPN_enabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("requestVPN:true");
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .removeCapability(NetworkCapabilities.NET_CAPABILITY_NOT_VPN)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_includeOtherUidNetworks_disabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("includeOtherUidNetworks:false");
+ assertTrue(request.equals(new NetworkRequest.Builder()
+ .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+ .build()));
+ }
+
+ @Test
+ @SmallTest
+ public void testConnectivityManager_includeOtherUidNetworks_enabled() {
+ NetworkRequest request = getNetworkRequestForFieldTrials("includeOtherUidNetworks:true");
+ NetworkRequest.Builder builder =
+ new NetworkRequest.Builder().addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+ builder.setIncludeOtherUidNetworks(true);
+ }
+ assertTrue(request.equals(builder.build()));
+ }
+
+ private NetworkRequest getNetworkRequestForFieldTrials(String fieldTrialsString) {
+ return new ConnectivityManagerDelegate(
+ (ConnectivityManager) null, new HashSet<>(), fieldTrialsString)
+ .createNetworkRequest();
+ }
+
+ /**
+ * Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
+ * on having any active network connections so it cannot usefully check results, but it can at
+ * least check that the functions don't crash.
+ */
+ @Test
+ @SmallTest
+ public void testQueryableAPIsDoNotCrash() {
+ NetworkMonitorAutoDetect.Observer observer =
+ new TestNetworkMonitorAutoDetectObserver(fieldTrialsString);
+ NetworkMonitorAutoDetect ncn =
+ new NetworkMonitorAutoDetect(observer, InstrumentationRegistry.getTargetContext());
+ ncn.getDefaultNetId();
+ }
+
+ /**
+   * Tests that startMonitoring and stopMonitoring correctly update the detector and the number
+   * of observers.
+ */
+ @Test
+ @SmallTest
+ public void testStartStopMonitoring() {
+ NetworkMonitor networkMonitor = NetworkMonitor.getInstance();
+ Context context = ContextUtils.getApplicationContext();
+ networkMonitor.startMonitoring(context, fieldTrialsString);
+ assertEquals(1, networkMonitor.getNumObservers());
+ assertEquals(detector, networkMonitor.getNetworkChangeDetector());
+ networkMonitor.stopMonitoring();
+ assertEquals(0, networkMonitor.getNumObservers());
+ assertNull(networkMonitor.getNetworkChangeDetector());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java
new file mode 100644
index 0000000000..f71bd36063
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionEndToEndTest.java
@@ -0,0 +1,1641 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.IceGatheringState;
+import org.webrtc.PeerConnection.PeerConnectionState;
+import org.webrtc.PeerConnection.SignalingState;
+
+/** End-to-end tests for {@link PeerConnection}. */
+public class PeerConnectionEndToEndTest {
+ private static final String TAG = "PeerConnectionEndToEndTest";
+ private static final int DEFAULT_TIMEOUT_SECONDS = 20;
+ private static final int SHORT_TIMEOUT_SECONDS = 5;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
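+  /**
+   * Combined observer that records expectations via the expect*() methods and checks them off as
+   * the corresponding PeerConnection, DataChannel, and stats callbacks fire.
+   * waitForAllExpectationsToBeSatisfied() polls until every registered expectation is met or the
+   * timeout expires.
+   */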
+ private static class ObserverExpectations
+ implements PeerConnection.Observer, VideoSink, DataChannel.Observer, StatsObserver,
+ RTCStatsCollectorCallback, RtpReceiver.Observer {
+ private final String name;
+ private int expectedIceCandidates;
+ private int expectedErrors;
+ private int expectedRenegotiations;
+ private int expectedWidth;
+ private int expectedHeight;
+ private int expectedFramesDelivered;
+ private int expectedTracksAdded;
+ private Queue<SignalingState> expectedSignalingChanges = new ArrayDeque<>();
+ private Queue<IceConnectionState> expectedIceConnectionChanges = new ArrayDeque<>();
+ private Queue<IceConnectionState> expectedStandardizedIceConnectionChanges = new ArrayDeque<>();
+ private Queue<PeerConnectionState> expectedConnectionChanges = new ArrayDeque<>();
+ private Queue<IceGatheringState> expectedIceGatheringChanges = new ArrayDeque<>();
+ private Queue<String> expectedAddStreamLabels = new ArrayDeque<>();
+ private Queue<String> expectedRemoveStreamLabels = new ArrayDeque<>();
+ private final List<IceCandidate> gotIceCandidates = new ArrayList<>();
+ private Map<MediaStream, WeakReference<VideoSink>> videoSinks = new IdentityHashMap<>();
+ private DataChannel dataChannel;
+ private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>();
+ private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>();
+ private Queue<String> expectedRemoteDataChannelLabels = new ArrayDeque<>();
+ private int expectedOldStatsCallbacks;
+ private int expectedNewStatsCallbacks;
+ private List<StatsReport[]> gotStatsReports = new ArrayList<>();
+ private final HashSet<MediaStream> gotRemoteStreams = new HashSet<>();
+ private int expectedFirstAudioPacket;
+ private int expectedFirstVideoPacket;
+
+ public ObserverExpectations(String name) {
+ this.name = name;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setDataChannel(DataChannel dataChannel) {
+ assertNull(this.dataChannel);
+ this.dataChannel = dataChannel;
+ this.dataChannel.registerObserver(this);
+ assertNotNull(this.dataChannel);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceCandidates(int count) {
+ expectedIceCandidates += count;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceCandidate(IceCandidate candidate) {
+ Logging.d(TAG, "onIceCandidate: " + candidate.toString());
+ --expectedIceCandidates;
+
+ // We don't assert expectedIceCandidates >= 0 because it's hard to know
+ // how many to expect, in general. We only use expectIceCandidates to
+ // assert a minimal count.
+ synchronized (gotIceCandidates) {
+ gotIceCandidates.add(candidate);
+ gotIceCandidates.notifyAll();
+ }
+ }
+
+ @Override
+ public void onIceCandidateError(IceCandidateErrorEvent event) {}
+
+ @Override
+ public void onIceCandidatesRemoved(IceCandidate[] candidates) {}
+
+ @Override
+ public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {}
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void setExpectedResolution(int width, int height) {
+ expectedWidth = width;
+ expectedHeight = height;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectFramesDelivered(int count) {
+ expectedFramesDelivered += count;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(VideoFrame frame) {
+ if (expectedFramesDelivered <= 0) {
+ return;
+ }
+ assertTrue(expectedWidth > 0);
+ assertTrue(expectedHeight > 0);
+ assertEquals(expectedWidth, frame.getRotatedWidth());
+ assertEquals(expectedHeight, frame.getRotatedHeight());
+ --expectedFramesDelivered;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectSignalingChange(SignalingState newState) {
+ expectedSignalingChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onSignalingChange(SignalingState newState) {
+ assertEquals(expectedSignalingChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceConnectionChange(IceConnectionState newState) {
+ expectedIceConnectionChanges.add(newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectStandardizedIceConnectionChange(IceConnectionState newState) {
+ expectedStandardizedIceConnectionChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceConnectionChange(IceConnectionState newState) {
+ // TODO(bemasc): remove once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ if (newState.equals(IceConnectionState.COMPLETED)) {
+ return;
+ }
+
+ if (expectedIceConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + "Got an unexpected ICE connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedIceConnectionChanges.remove(), newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStandardizedIceConnectionChange(IceConnectionState newState) {
+ if (newState.equals(IceConnectionState.COMPLETED)) {
+ return;
+ }
+
+ if (expectedIceConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + "Got an unexpected standardized ICE connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedStandardizedIceConnectionChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectConnectionChange(PeerConnectionState newState) {
+ expectedConnectionChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onConnectionChange(PeerConnectionState newState) {
+ if (expectedConnectionChanges.isEmpty()) {
+ Logging.d(TAG, name + " got an unexpected DTLS connection change " + newState);
+ return;
+ }
+
+ assertEquals(expectedConnectionChanges.remove(), newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceConnectionReceivingChange(boolean receiving) {
+ Logging.d(TAG, name + " got an ICE connection receiving change " + receiving);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectIceGatheringChange(IceGatheringState newState) {
+ expectedIceGatheringChanges.add(newState);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onIceGatheringChange(IceGatheringState newState) {
+ // It's fine to get a variable number of GATHERING messages before
+ // COMPLETE fires (depending on how long the test runs) so we don't assert
+ // any particular count.
+ if (newState == IceGatheringState.GATHERING) {
+ return;
+ }
+      if (expectedIceGatheringChanges.isEmpty()) {
+        Logging.d(TAG, name + " got an unexpected ICE gathering change " + newState);
+        return;
+      }
+ assertEquals(expectedIceGatheringChanges.remove(), newState);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectAddStream(String label) {
+ expectedAddStreamLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onAddStream(MediaStream stream) {
+ assertEquals(expectedAddStreamLabels.remove(), stream.getId());
+ for (AudioTrack track : stream.audioTracks) {
+ assertEquals("audio", track.kind());
+ }
+ for (VideoTrack track : stream.videoTracks) {
+ assertEquals("video", track.kind());
+ track.addSink(this);
+ assertNull(videoSinks.put(stream, new WeakReference<VideoSink>(this)));
+ }
+ gotRemoteStreams.add(stream);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectRemoveStream(String label) {
+ expectedRemoveStreamLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onRemoveStream(MediaStream stream) {
+ assertEquals(expectedRemoveStreamLabels.remove(), stream.getId());
+ WeakReference<VideoSink> videoSink = videoSinks.remove(stream);
+ assertNotNull(videoSink);
+ assertNotNull(videoSink.get());
+ for (VideoTrack videoTrack : stream.videoTracks) {
+ videoTrack.removeSink(videoSink.get());
+ }
+ gotRemoteStreams.remove(stream);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectDataChannel(String label) {
+ expectedRemoteDataChannelLabels.add(label);
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onDataChannel(DataChannel remoteDataChannel) {
+ assertEquals(expectedRemoteDataChannelLabels.remove(), remoteDataChannel.label());
+ setDataChannel(remoteDataChannel);
+ assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectRenegotiationNeeded() {
+ ++expectedRenegotiations;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onRenegotiationNeeded() {
+ assertTrue(--expectedRenegotiations >= 0);
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectAddTrack(int expectedTracksAdded) {
+ this.expectedTracksAdded = expectedTracksAdded;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) {
+ expectedTracksAdded--;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectMessage(ByteBuffer expectedBuffer, boolean expectedBinary) {
+ expectedBuffers.add(new DataChannel.Buffer(expectedBuffer, expectedBinary));
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onMessage(DataChannel.Buffer buffer) {
+ DataChannel.Buffer expected = expectedBuffers.remove();
+ assertEquals(expected.binary, buffer.binary);
+ assertTrue(expected.data.equals(buffer.data));
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onBufferedAmountChange(long previousAmount) {
+ assertFalse(previousAmount == dataChannel.bufferedAmount());
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStateChange() {
+ assertEquals(expectedStateChanges.remove(), dataChannel.state());
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectStateChange(DataChannel.State state) {
+ expectedStateChanges.add(state);
+ }
+
+ // Old getStats callback.
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onComplete(StatsReport[] reports) {
+ if (--expectedOldStatsCallbacks < 0) {
+ throw new RuntimeException("Unexpected stats report: " + Arrays.toString(reports));
+ }
+ gotStatsReports.add(reports);
+ }
+
+ // New getStats callback.
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onStatsDelivered(RTCStatsReport report) {
+ if (--expectedNewStatsCallbacks < 0) {
+ throw new RuntimeException("Unexpected stats report: " + report);
+ }
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFirstPacketReceived(MediaStreamTrack.MediaType mediaType) {
+ if (mediaType == MediaStreamTrack.MediaType.MEDIA_TYPE_AUDIO) {
+ expectedFirstAudioPacket--;
+ } else {
+ expectedFirstVideoPacket--;
+ }
+ if (expectedFirstAudioPacket < 0 || expectedFirstVideoPacket < 0) {
+ throw new RuntimeException("Unexpected call of onFirstPacketReceived");
+ }
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectFirstPacketReceived() {
+ expectedFirstAudioPacket = 1;
+ expectedFirstVideoPacket = 1;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectOldStatsCallback() {
+ ++expectedOldStatsCallbacks;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void expectNewStatsCallback() {
+ ++expectedNewStatsCallbacks;
+ }
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized List<StatsReport[]> takeStatsReports() {
+ List<StatsReport[]> got = gotStatsReports;
+ gotStatsReports = new ArrayList<StatsReport[]>();
+ return got;
+ }
+
+ // Return a set of expectations that haven't been satisfied yet, possibly
+ // empty if no such expectations exist.
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized TreeSet<String> unsatisfiedExpectations() {
+ TreeSet<String> stillWaitingForExpectations = new TreeSet<String>();
+ if (expectedIceCandidates > 0) { // See comment in onIceCandidate.
+ stillWaitingForExpectations.add("expectedIceCandidates");
+ }
+ if (expectedErrors != 0) {
+ stillWaitingForExpectations.add("expectedErrors: " + expectedErrors);
+ }
+ if (expectedSignalingChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedSignalingChanges: " + expectedSignalingChanges.size());
+ }
+ if (expectedIceConnectionChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedIceConnectionChanges: " + expectedIceConnectionChanges.size());
+ }
+ if (expectedIceGatheringChanges.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedIceGatheringChanges: " + expectedIceGatheringChanges.size());
+ }
+ if (expectedAddStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedAddStreamLabels: " + expectedAddStreamLabels.size());
+ }
+ if (expectedRemoveStreamLabels.size() != 0) {
+ stillWaitingForExpectations.add(
+ "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
+ }
+ if (expectedFramesDelivered > 0) {
+ stillWaitingForExpectations.add("expectedFramesDelivered: " + expectedFramesDelivered);
+ }
+ if (!expectedBuffers.isEmpty()) {
+ stillWaitingForExpectations.add("expectedBuffers: " + expectedBuffers.size());
+ }
+ if (!expectedStateChanges.isEmpty()) {
+ stillWaitingForExpectations.add("expectedStateChanges: " + expectedStateChanges.size());
+ }
+ if (!expectedRemoteDataChannelLabels.isEmpty()) {
+ stillWaitingForExpectations.add(
+ "expectedRemoteDataChannelLabels: " + expectedRemoteDataChannelLabels.size());
+ }
+ if (expectedOldStatsCallbacks != 0) {
+ stillWaitingForExpectations.add("expectedOldStatsCallbacks: " + expectedOldStatsCallbacks);
+ }
+ if (expectedNewStatsCallbacks != 0) {
+ stillWaitingForExpectations.add("expectedNewStatsCallbacks: " + expectedNewStatsCallbacks);
+ }
+ if (expectedFirstAudioPacket > 0) {
+ stillWaitingForExpectations.add("expectedFirstAudioPacket: " + expectedFirstAudioPacket);
+ }
+ if (expectedFirstVideoPacket > 0) {
+ stillWaitingForExpectations.add("expectedFirstVideoPacket: " + expectedFirstVideoPacket);
+ }
+ if (expectedTracksAdded != 0) {
+ stillWaitingForExpectations.add("expectedAddedTrack: " + expectedTracksAdded);
+ }
+ return stillWaitingForExpectations;
+ }
+
+ public boolean waitForAllExpectationsToBeSatisfied(int timeoutSeconds) {
+ // TODO(fischman): problems with this approach:
+ // - come up with something better than a poll loop
+ // - avoid serializing expectations explicitly; the test is not as robust
+ // as it could be because it must place expectations between wait
+ // statements very precisely (e.g. frame must not arrive before its
+ // expectation, and expectation must not be registered so early as to
+ // stall a wait). Use callbacks to fire off dependent steps instead of
+ // explicitly waiting, so there can be just a single wait at the end of
+ // the test.
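+      // A callback-driven alternative (sketch only, not wired up in this
+      // test) could replace the poll loop with a single latch that each
+      // satisfied expectation counts down:
+      //   CountDownLatch done = new CountDownLatch(totalExpectations);
+      //   ... each on*() callback calls done.countDown() when satisfied ...
+      //   return done.await(timeoutSeconds, TimeUnit.SECONDS);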
+ long endTime = System.currentTimeMillis() + 1000 * timeoutSeconds;
+ TreeSet<String> prev = null;
+ TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
+ while (!stillWaitingForExpectations.isEmpty()) {
+ if (!stillWaitingForExpectations.equals(prev)) {
+ Logging.d(TAG,
+ name + " still waiting at\n " + (new Throwable()).getStackTrace()[1]
+ + "\n for: " + Arrays.toString(stillWaitingForExpectations.toArray()));
+ }
+ if (endTime < System.currentTimeMillis()) {
+ Logging.d(TAG,
+ name + " timed out waiting for: "
+ + Arrays.toString(stillWaitingForExpectations.toArray()));
+ return false;
+ }
+ try {
+ Thread.sleep(10);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ prev = stillWaitingForExpectations;
+ stillWaitingForExpectations = unsatisfiedExpectations();
+ }
+ if (prev == null) {
+ Logging.d(
+ TAG, name + " didn't need to wait at\n " + (new Throwable()).getStackTrace()[1]);
+ }
+ return true;
+ }
+
+    // Returns a list of all currently gathered ICE candidates, or waits until
+    // at least one candidate has been gathered.
+ public List<IceCandidate> getAtLeastOneIceCandidate() throws InterruptedException {
+ synchronized (gotIceCandidates) {
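+        // Blocks on the gotIceCandidates monitor; onIceCandidate() (defined
+        // earlier in this class) presumably calls notifyAll() after storing
+        // each newly gathered candidate.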
+ while (gotIceCandidates.isEmpty()) {
+ gotIceCandidates.wait();
+ }
+ return new ArrayList<IceCandidate>(gotIceCandidates);
+ }
+ }
+ }
+
+ // Sets the expected resolution for an ObserverExpectations once a frame
+ // has been captured.
+ private static class ExpectedResolutionSetter implements VideoSink {
+ private ObserverExpectations observer;
+
+ public ExpectedResolutionSetter(ObserverExpectations observer) {
+ this.observer = observer;
+ }
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrame(VideoFrame frame) {
+      // Because different camera devices (fake & physical) produce different
+      // resolutions, we only sanity-check that the reported sizes are
+      // positive before forwarding them.
+ assertTrue(frame.getRotatedWidth() > 0);
+ assertTrue(frame.getRotatedHeight() > 0);
+ observer.setExpectedResolution(frame.getRotatedWidth(), frame.getRotatedHeight());
+ frame.retain();
+ }
+ }
+
+ private static class SdpObserverLatch implements SdpObserver {
+ private boolean success;
+ private @Nullable SessionDescription sdp;
+ private @Nullable String error;
+ private CountDownLatch latch = new CountDownLatch(1);
+
+ public SdpObserverLatch() {}
+
+ @Override
+ public void onCreateSuccess(SessionDescription sdp) {
+ this.sdp = sdp;
+ onSetSuccess();
+ }
+
+ @Override
+ public void onSetSuccess() {
+ success = true;
+ latch.countDown();
+ }
+
+ @Override
+ public void onCreateFailure(String error) {
+ onSetFailure(error);
+ }
+
+ @Override
+ public void onSetFailure(String error) {
+ this.error = error;
+ latch.countDown();
+ }
+
+ public boolean await() {
+ try {
+ assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
+ return getSuccess();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public boolean getSuccess() {
+ return success;
+ }
+
+ public @Nullable SessionDescription getSdp() {
+ return sdp;
+ }
+
+ public @Nullable String getError() {
+ return error;
+ }
+ }
+
+ // Return a weak reference to test that ownership is correctly held by
+ // PeerConnection, not by test code.
+ private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+ PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+ String audioTrackId, VideoSink videoSink) {
+ return addTracksToPC(factory, pc, videoSource, streamLabel, videoTrackId, audioTrackId,
+ videoSink, /*useAddStream=*/false);
+ }
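+
+  // Overload that lets legacy Plan B tests opt into addStream() instead of
+  // the Unified Plan addTrack() path.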
+ private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
+ PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
+ String audioTrackId, VideoSink videoSink, boolean useAddStream) {
+ MediaStream lMS = factory.createLocalMediaStream(streamLabel);
+ VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
+ assertNotNull(videoTrack);
+ assertNotNull(videoSink);
+ videoTrack.addSink(videoSink);
+ lMS.addTrack(videoTrack);
+ // Just for fun, let's remove and re-add the track.
+ lMS.removeTrack(videoTrack);
+ lMS.addTrack(videoTrack);
+ lMS.addTrack(
+ factory.createAudioTrack(audioTrackId, factory.createAudioSource(new MediaConstraints())));
+ if (!useAddStream) {
+ // In Unified Plan, addTrack() is the preferred way of adding tracks.
+ for (AudioTrack track : lMS.audioTracks) {
+ pc.addTrack(track, Collections.singletonList(lMS.getId()));
+ }
+ for (VideoTrack track : lMS.videoTracks) {
+ pc.addTrack(track, Collections.singletonList(lMS.getId()));
+ }
+ } else {
+ // Only in Plan B is addStream() supported. Used by a legacy test not yet
+ // updated to Unified Plan.
+ // TODO(https://crbug.com/webrtc/13528): Remove use of addStream().
+ pc.addStream(lMS);
+ }
+ return new WeakReference<MediaStream>(lMS);
+ }
+
+ @Test
+ @MediumTest
+ public void testCompleteSession() throws Exception {
+ Metrics.enable();
+    // Allow loopback interfaces too, since our Android test devices often
+    // have no other usable network interfaces. A networkIgnoreMask of 0 means
+    // no interface types are filtered out.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+ iceServers.add(PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ // We want to use the same camera for offerer & answerer, so create it here
+ // instead of in addTracksToPC.
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ videoCapturer.startCapture(640, 480, 30);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> oLMS =
+ addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+ "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations));
+
+ offeringExpectations.expectAddTrack(2);
+ answeringExpectations.expectAddTrack(2);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringExpectations.expectAddStream("offeredMediaStream");
+ // SCTP DataChannels are announced via OPEN messages over the established
+ // connection (not via SDP), so answeringExpectations can only register
+ // expecting the channel during ICE, below.
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ answeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+ "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
+ new ExpectedResolutionSetter(offeringExpectations));
+
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ offeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceCandidates(2);
+
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringExpectations.expectAddStream("answeredMediaStream");
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ //
+ // offeringExpectations.expectIceConnectionChange(
+ // IceConnectionState.COMPLETED);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+ assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+ assertEquals(offeringPC.getSenders().size(), 2);
+ assertEquals(offeringPC.getReceivers().size(), 2);
+ assertEquals(answeringPC.getSenders().size(), 2);
+ assertEquals(answeringPC.getReceivers().size(), 2);
+
+ offeringExpectations.expectFirstPacketReceived();
+ answeringExpectations.expectFirstPacketReceived();
+
+ for (RtpReceiver receiver : offeringPC.getReceivers()) {
+ receiver.SetObserver(offeringExpectations);
+ }
+
+ for (RtpReceiver receiver : answeringPC.getReceivers()) {
+ receiver.SetObserver(answeringExpectations);
+ }
+
+ // Wait for at least some frames to be delivered at each end (number
+ // chosen arbitrarily).
+ offeringExpectations.expectFramesDelivered(10);
+ answeringExpectations.expectFramesDelivered(10);
+
+ offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+ // See commentary about SCTP DataChannels above for why this is here.
+ answeringExpectations.expectDataChannel("offeringDC");
+ answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+    // Wait until the offering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+    // Wait until the answering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Test some of the RtpSender API.
+ RtpSender videoSender = null;
+ RtpSender audioSender = null;
+ for (RtpSender sender : offeringPC.getSenders()) {
+ if (sender.track().kind().equals("video")) {
+ videoSender = sender;
+ } else {
+ audioSender = sender;
+ }
+ }
+ assertNotNull(videoSender);
+ assertNotNull(audioSender);
+
+ // Set a bitrate limit for the outgoing video stream for the offerer.
+ RtpParameters rtpParameters = videoSender.getParameters();
+ assertNotNull(rtpParameters);
+ assertEquals(1, rtpParameters.encodings.size());
+ assertNull(rtpParameters.encodings.get(0).maxBitrateBps);
+ assertNull(rtpParameters.encodings.get(0).minBitrateBps);
+ assertNull(rtpParameters.encodings.get(0).maxFramerate);
+ assertNull(rtpParameters.encodings.get(0).numTemporalLayers);
+ assertNull(rtpParameters.encodings.get(0).scaleResolutionDownBy);
+ assertTrue(rtpParameters.encodings.get(0).rid.isEmpty());
+
+ rtpParameters.encodings.get(0).maxBitrateBps = 300000;
+ rtpParameters.encodings.get(0).minBitrateBps = 100000;
+ rtpParameters.encodings.get(0).maxFramerate = 20;
+ rtpParameters.encodings.get(0).numTemporalLayers = 2;
+ rtpParameters.encodings.get(0).scaleResolutionDownBy = 2.0;
+ assertTrue(videoSender.setParameters(rtpParameters));
+
+ // Create a DTMF sender.
+ DtmfSender dtmfSender = audioSender.dtmf();
+ assertNotNull(dtmfSender);
+ assertTrue(dtmfSender.canInsertDtmf());
+ assertTrue(dtmfSender.insertDtmf("123", 300, 100));
+
+ // Verify that we can read back the updated value.
+ rtpParameters = videoSender.getParameters();
+ assertEquals(300000, (int) rtpParameters.encodings.get(0).maxBitrateBps);
+ assertEquals(100000, (int) rtpParameters.encodings.get(0).minBitrateBps);
+ assertEquals(20, (int) rtpParameters.encodings.get(0).maxFramerate);
+ assertEquals(2, (int) rtpParameters.encodings.get(0).numTemporalLayers);
+ assertThat(rtpParameters.encodings.get(0).scaleResolutionDownBy).isEqualTo(2.0);
+
+ // Test send & receive UTF-8 text.
+ answeringExpectations.expectMessage(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ DataChannel.Buffer buffer =
+ new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ assertTrue(offeringExpectations.dataChannel.send(buffer));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Construct this binary message two different ways to ensure no
+ // shortcuts are taken.
+ ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+ for (byte i = 1; i < 6; ++i) {
+ expectedBinaryMessage.put(i);
+ }
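+    // flip() switches the buffer from write mode to read mode: the limit is
+    // set to the current position and the position is reset to zero, so the
+    // full five-byte payload is readable.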
+ expectedBinaryMessage.flip();
+ offeringExpectations.expectMessage(expectedBinaryMessage, true);
+ assertTrue(answeringExpectations.dataChannel.send(
+ new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ offeringExpectations.dataChannel.close();
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Test SetBitrate.
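+    // setBitrate(min, current, max) requires min <= current <= max, so the
+    // second call (3, 2, 1) is expected to be rejected.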
+ assertTrue(offeringPC.setBitrate(100000, 5000000, 500000000));
+ assertFalse(offeringPC.setBitrate(3, 2, 1));
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ videoCapturer.stopCapture();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ System.gc();
+ }
+
+ @Test
+ @MediumTest
+ public void testDataChannelOnlySession() throws Exception {
+    // Allow loopback interfaces too, since our Android test devices often
+    // have no other usable network interfaces.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory =
+ PeerConnectionFactory.builder().setOptions(options).createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+ iceServers.add(PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ // SCTP DataChannels are announced via OPEN messages over the established
+ // connection (not via SDP), so answeringExpectations can only register
+ // expecting the channel during ICE, below.
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ offeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceCandidates(2);
+
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+ assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+ assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+ offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+ // See commentary about SCTP DataChannels above for why this is here.
+ answeringExpectations.expectDataChannel("offeringDC");
+ answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+    // Wait until the offering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+    // Wait until the answering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Test send & receive UTF-8 text.
+ answeringExpectations.expectMessage(
+ ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ DataChannel.Buffer buffer =
+ new DataChannel.Buffer(ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+ assertTrue(offeringExpectations.dataChannel.send(buffer));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Construct this binary message two different ways to ensure no
+ // shortcuts are taken.
+ ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+ for (byte i = 1; i < 6; ++i) {
+ expectedBinaryMessage.put(i);
+ }
+ expectedBinaryMessage.flip();
+ offeringExpectations.expectMessage(expectedBinaryMessage, true);
+ assertTrue(answeringExpectations.dataChannel.send(
+ new DataChannel.Buffer(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}), true)));
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+ offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+ offeringExpectations.dataChannel.close();
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ factory.dispose();
+ System.gc();
+ }
+
+  // Tests that ICE candidates disallowed by the current ICE transport type
+  // (and therefore not signaled to the gathering PeerConnection) can be
+  // surfaced later, via setConfiguration, once the new ICE transport type
+  // allows them and
+  // RTCConfiguration.surfaceIceCandidatesOnIceTransportTypeChanged is true.
+ @Test
+ @SmallTest
+ public void testSurfaceIceCandidatesWhenIceTransportTypeChanged() throws Exception {
+    // For this test we only need one PeerConnection to observe gathering
+    // behavior, so we create only the offering PC below.
+    //
+    // Allow loopback interfaces too, since our Android test devices often
+    // have no other usable network interfaces.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory =
+ PeerConnectionFactory.builder().setOptions(options).createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+    // NONE prevents any candidate from being signaled to the PC.
+ rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.NONE;
+    // Continual gathering must be enabled so that candidates can be surfaced
+    // when the ICE transport type changes.
+ rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+ rtcConfig.surfaceIceCandidatesOnIceTransportTypeChanged = true;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ // Create a data channel and set local description to kick off the ICE candidate gathering.
+ offeringExpectations.expectRenegotiationNeeded();
+ DataChannel offeringDC = offeringPC.createDataChannel("offeringDC", new DataChannel.Init());
+ assertEquals("offeringDC", offeringDC.label());
+
+ offeringExpectations.setDataChannel(offeringDC);
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+
+ // Wait until we satisfy all expectations in the setup.
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+    // Expect at least one candidate to be gathered; this should fail (time
+    // out) because the transport type is NONE.
+ offeringExpectations.expectIceCandidates(1);
+ assertFalse(offeringExpectations.waitForAllExpectationsToBeSatisfied(SHORT_TIMEOUT_SECONDS));
+
+    // Change the transport type; this time the gathering expectation should
+    // be met.
+ rtcConfig.iceTransportsType = PeerConnection.IceTransportsType.ALL;
+ offeringPC.setConfiguration(rtcConfig);
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ }
+
+ @Test
+ @MediumTest
+ public void testTrackRemovalAndAddition() throws Exception {
+    // Allow loopback interfaces too, since our Android test devices often
+    // have no other usable network interfaces.
+ PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ options.networkIgnoreMask = 0;
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setOptions(options)
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ List<PeerConnection.IceServer> iceServers = new ArrayList<>();
+ iceServers.add(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
+
+ PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+ // TODO(https://crbug.com/webrtc/13528): Update test not to use Plan B.
+ rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(rtcConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations answeringExpectations = new ObserverExpectations("PCTest:answerer");
+ PeerConnection answeringPC = factory.createPeerConnection(rtcConfig, answeringExpectations);
+ assertNotNull(answeringPC);
+
+ // We want to use the same camera for offerer & answerer, so create it here
+ // instead of in addTracksToPC.
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ videoCapturer.startCapture(640, 480, 30);
+
+ // Add offerer media stream.
+ offeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> oLMS =
+ addTracksToPC(factory, offeringPC, videoSource, "offeredMediaStream", "offeredVideoTrack",
+ "offeredAudioTrack", new ExpectedResolutionSetter(answeringExpectations),
+ /*useAddStream=*/true);
+
+ offeringExpectations.expectAddTrack(2);
+ answeringExpectations.expectAddTrack(2);
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ // Set local description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringExpectations.expectIceCandidates(2);
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringExpectations.expectAddStream("offeredMediaStream");
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Add answerer media stream.
+ answeringExpectations.expectRenegotiationNeeded();
+ WeakReference<MediaStream> aLMS = addTracksToPC(factory, answeringPC, videoSource,
+ "answeredMediaStream", "answeredVideoTrack", "answeredAudioTrack",
+ new ExpectedResolutionSetter(offeringExpectations),
+ /*useAddStream=*/true);
+
+ // Create answer.
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ // Set local description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringExpectations.expectIceCandidates(2);
+ answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTING);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringExpectations.expectAddStream("answeredMediaStream");
+
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ offeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+ // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+ // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+ //
+ // offeringExpectations.expectIceConnectionChange(
+ // IceConnectionState.COMPLETED);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CHECKING);
+ answeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CONNECTED);
+ answeringExpectations.expectConnectionChange(PeerConnectionState.CONNECTED);
+
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+    // Wait until the offering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the answering PC.
+ for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+ answeringPC.addIceCandidate(candidate);
+ }
+
+    // Wait until the answering PC has gathered at least one ICE candidate and
+    // forward the gathered candidates to the offering PC.
+ for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+ offeringPC.addIceCandidate(candidate);
+ }
+
+ // Wait for one frame of the correct size to be delivered.
+    // Otherwise we could get a dummy black frame of unexpected size when the
+    // video track is removed.
+ offeringExpectations.expectFramesDelivered(1);
+ answeringExpectations.expectFramesDelivered(1);
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ assertEquals(PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+ assertEquals(PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+ // Now do another negotiation, removing the video track from one peer.
+ // This previously caused a crash on pc.dispose().
+ // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5128
+ VideoTrack offererVideoTrack = oLMS.get().videoTracks.get(0);
+ // Note that when we call removeTrack, we regain responsibility for
+ // disposing of the track.
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererVideoTrack);
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Make sure the track was really removed.
+ MediaStream aRMS = answeringExpectations.gotRemoteStreams.iterator().next();
+ assertTrue(aRMS.videoTracks.isEmpty());
+
+    // Re-add the video track to test whether the answeringPC creates a new
+    // track for the updated remote description.
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().addTrack(offererVideoTrack);
+    // The answeringPC sets the updated remote description with a track added,
+    // so its onAddTrack callback is expected to fire exactly once.
+ answeringExpectations.expectAddTrack(1);
+ offeringExpectations.expectAddTrack(0);
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Finally, remove both the audio and video tracks, which should completely
+ // remove the remote stream. This used to trigger an assert.
+ // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5128
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererVideoTrack);
+ AudioTrack offererAudioTrack = oLMS.get().audioTracks.get(0);
+ offeringExpectations.expectRenegotiationNeeded();
+ oLMS.get().removeTrack(offererAudioTrack);
+
+ answeringExpectations.expectRemoveStream("offeredMediaStream");
+ negotiate(offeringPC, offeringExpectations, answeringPC, answeringExpectations);
+
+ // Make sure the stream was really removed.
+ assertTrue(answeringExpectations.gotRemoteStreams.isEmpty());
+
+ // Free the Java-land objects and collect them.
+ shutdownPC(offeringPC, offeringExpectations);
+ offeringPC = null;
+ shutdownPC(answeringPC, answeringExpectations);
+ answeringPC = null;
+ offererVideoTrack.dispose();
+ offererAudioTrack.dispose();
+ videoCapturer.stopCapture();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ System.gc();
+ }
+
+ /**
+ * Test that a Java MediaStream is updated when the native stream is.
+ * <p>
+ * Specifically, test that when remote tracks are indicated as being added or
+ * removed from a MediaStream (via "a=ssrc" or "a=msid" in a remote
+ * description), the existing remote MediaStream object is updated.
+ * <p>
+ * This test starts with just an audio track, adds a video track, then
+ * removes it. It only applies remote offers, which is sufficient to test
+ * this functionality and simplifies the test. This means that no media will
+ * actually be sent/received; we're just testing that the Java MediaStream
+ * object gets updated when the native object changes.
+ */
+ @Test
+ @MediumTest
+ public void testRemoteStreamUpdatedWhenTracksAddedOrRemoved() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder()
+ .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
+ .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
+ .createPeerConnectionFactory();
+
+ // TODO(https://crbug.com/webrtc/13528): Update test not to use Plan B.
+ PeerConnection.RTCConfiguration planBConfig =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ planBConfig.sdpSemantics = PeerConnection.SdpSemantics.PLAN_B;
+
+    // Use OfferToReceiveAudio/Video to ensure every offer has an audio and
+    // video m= section. This simplifies the test because it means we don't
+    // have to actually apply the offer to "offeringPC"; that PC is just used
+    // as an SDP factory.
+ MediaConstraints offerConstraints = new MediaConstraints();
+ offerConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
+ offerConstraints.mandatory.add(
+ new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
+
+ // This PeerConnection will only be used to generate offers.
+ ObserverExpectations offeringExpectations = new ObserverExpectations("offerer");
+ PeerConnection offeringPC = factory.createPeerConnection(planBConfig, offeringExpectations);
+ assertNotNull(offeringPC);
+
+ ObserverExpectations expectations = new ObserverExpectations("PC under test");
+ PeerConnection pcUnderTest = factory.createPeerConnection(planBConfig, expectations);
+ assertNotNull(pcUnderTest);
+
+ // Add offerer media stream with just an audio track.
+ MediaStream localStream = factory.createLocalMediaStream("stream");
+ AudioTrack localAudioTrack =
+ factory.createAudioTrack("audio", factory.createAudioSource(new MediaConstraints()));
+ localStream.addTrack(localAudioTrack);
+ offeringExpectations.expectRenegotiationNeeded();
+ RtpSender audioSender =
+ offeringPC.addTrack(localAudioTrack, Collections.singletonList(localStream.getId()));
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ expectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ expectations.expectAddStream("stream");
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // Sanity check that we get one remote stream with one audio track.
+ MediaStream remoteStream = expectations.gotRemoteStreams.iterator().next();
+ assertEquals(remoteStream.audioTracks.size(), 1);
+ assertEquals(remoteStream.videoTracks.size(), 0);
+
+ // Add a video track...
+ final CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
+ final VideoCapturer videoCapturer =
+ enumerator.createCapturer(enumerator.getDeviceNames()[0], null /* eventsHandler */);
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper", /* sharedContext= */ null);
+ final VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoCapturer.initialize(surfaceTextureHelper, InstrumentationRegistry.getTargetContext(),
+ videoSource.getCapturerObserver());
+ VideoTrack videoTrack = factory.createVideoTrack("video", videoSource);
+ offeringExpectations.expectRenegotiationNeeded();
+ localStream.addTrack(videoTrack);
+ offeringPC.addTrack(videoTrack, Collections.singletonList(localStream.getId()));
+ // ... and create an updated offer.
+ sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer with new video track to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // The remote stream should now have a video track.
+ assertEquals(remoteStream.audioTracks.size(), 1);
+ assertEquals(remoteStream.videoTracks.size(), 1);
+
+ // Finally, create another offer with the audio track removed.
+ offeringExpectations.expectRenegotiationNeeded();
+ localStream.removeTrack(localAudioTrack);
+ localAudioTrack.dispose();
+ offeringPC.removeTrack(audioSender);
+ sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, offerConstraints);
+ assertTrue(sdpLatch.await());
+ offerSdp = sdpLatch.getSdp();
+
+ // Apply remote offer with just a video track to PC under test.
+ sdpLatch = new SdpObserverLatch();
+ pcUnderTest.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ // The remote stream should no longer have an audio track.
+ assertEquals(remoteStream.audioTracks.size(), 0);
+ assertEquals(remoteStream.videoTracks.size(), 1);
+
+ // Free the Java-land objects. Video capturer and source aren't owned by
+ // the PeerConnection and need to be disposed separately.
+ // TODO(deadbeef): Should all these events really occur on disposal?
+ // "Gathering complete" is especially odd since gathering never started.
+    // Note that this test isn't meant to test these events, but we must
+    // register these expectations, or the test will crash.
+ offeringExpectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ offeringExpectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ offeringExpectations.expectSignalingChange(SignalingState.CLOSED);
+ offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ offeringPC.dispose();
+ expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectSignalingChange(SignalingState.CLOSED);
+ expectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+ pcUnderTest.dispose();
+ videoCapturer.dispose();
+ videoSource.dispose();
+ surfaceTextureHelper.dispose();
+ factory.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testRollback() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config =
+ new PeerConnection.RTCConfiguration(Collections.emptyList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ ObserverExpectations offeringExpectations = new ObserverExpectations("PCTest:offerer");
+ PeerConnection pc = factory.createPeerConnection(config, offeringExpectations);
+
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ pc.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offer = sdpLatch.getSdp();
+
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ pc.setLocalDescription(sdpLatch, offer);
+ assertTrue(sdpLatch.await());
+
+ SessionDescription rollback = new SessionDescription(SessionDescription.Type.ROLLBACK, "");
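+    // Per JSEP, rolling back a provisional local offer returns the signaling
+    // state to STABLE, which the expectation below verifies.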
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ // TODO(bugs.webrtc.org/11970): determine if triggering ONN (twice even) is correct.
+ offeringExpectations.expectRenegotiationNeeded();
+ offeringExpectations.expectRenegotiationNeeded();
+ pc.setLocalDescription(sdpLatch, rollback);
+ assertTrue(sdpLatch.await());
+
+ assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+ }
+
+ private static void negotiate(PeerConnection offeringPC,
+ ObserverExpectations offeringExpectations, PeerConnection answeringPC,
+ ObserverExpectations answeringExpectations) {
+ // Create offer.
+ SdpObserverLatch sdpLatch = new SdpObserverLatch();
+ offeringPC.createOffer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription offerSdp = sdpLatch.getSdp();
+ assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+ assertFalse(offerSdp.description.isEmpty());
+
+ // Set local description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+ offeringPC.setLocalDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.HAVE_REMOTE_OFFER);
+ answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Create answer.
+ sdpLatch = new SdpObserverLatch();
+ answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+ assertTrue(sdpLatch.await());
+ SessionDescription answerSdp = sdpLatch.getSdp();
+ assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+ assertFalse(answerSdp.description.isEmpty());
+
+ // Set local description for answerer.
+ sdpLatch = new SdpObserverLatch();
+ answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ answeringPC.setLocalDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+
+ // Set remote description for offerer.
+ sdpLatch = new SdpObserverLatch();
+ offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+ offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+ assertTrue(sdpLatch.await());
+ assertNull(sdpLatch.getSdp());
+ }
+
+ @SuppressWarnings("deprecation") // TODO(sakal): getStats is deprecated
+ private static void shutdownPC(PeerConnection pc, ObserverExpectations expectations) {
+ if (expectations.dataChannel != null) {
+ expectations.dataChannel.unregisterObserver();
+ expectations.dataChannel.dispose();
+ }
+
+ // Call getStats (old implementation) before shutting down PC.
+ expectations.expectOldStatsCallback();
+ assertTrue(pc.getStats(expectations, null /* track */));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Call the new getStats implementation as well.
+ expectations.expectNewStatsCallback();
+ pc.getStats(expectations);
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectStandardizedIceConnectionChange(IceConnectionState.CLOSED);
+ expectations.expectConnectionChange(PeerConnectionState.CLOSED);
+ expectations.expectSignalingChange(SignalingState.CLOSED);
+ pc.close();
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ // Call getStats (old implementation) after calling close(). Should still
+ // work.
+ expectations.expectOldStatsCallback();
+ assertTrue(pc.getStats(expectations, null /* track */));
+ assertTrue(expectations.waitForAllExpectationsToBeSatisfied(DEFAULT_TIMEOUT_SECONDS));
+
+ Logging.d(TAG, "FYI stats: ");
+ int reportIndex = -1;
+ for (StatsReport[] reports : expectations.takeStatsReports()) {
+ Logging.d(TAG, " Report #" + (++reportIndex));
+ for (int i = 0; i < reports.length; ++i) {
+ Logging.d(TAG, " " + reports[i].toString());
+ }
+ }
+ assertEquals(1, reportIndex);
+ Logging.d(TAG, "End stats.");
+
+ pc.dispose();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java
new file mode 100644
index 0000000000..8eebfb5878
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionFactoryTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+
+public class PeerConnectionFactoryTest {
+ @SmallTest
+ @Test
+ public void testInitialize() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeTwice() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeTwiceWithTracer() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @SmallTest
+ @Test
+ public void testInitializeWithTracerAndShutdown() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setEnableInternalTracer(true)
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.shutdownInternalTracer();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java
new file mode 100644
index 0000000000..7ced991859
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/PeerConnectionTest.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.util.Collections.singletonList;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.PeerConnection.TlsCertPolicy;
+
+/** Unit tests for {@link PeerConnection}. */
+public class PeerConnectionTest {
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ }
+
+ @Test
+ @SmallTest
+ public void testIceServerChanged() throws Exception {
+ PeerConnection.IceServer iceServer1 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Same as iceServer1.
+ PeerConnection.IceServer iceServer2 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the url.
+ PeerConnection.IceServer iceServer3 =
+ PeerConnection.IceServer.builder("turn:fake.example2.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the username.
+ PeerConnection.IceServer iceServer4 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername2")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the password.
+ PeerConnection.IceServer iceServer5 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword2")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_SECURE)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS certificate policy.
+ PeerConnection.IceServer iceServer6 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the hostname.
+ PeerConnection.IceServer iceServer7 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname2")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS ALPN.
+ PeerConnection.IceServer iceServer8 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol2"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve"))
+ .createIceServer();
+ // Differs from iceServer1 by the TLS elliptic curve.
+ PeerConnection.IceServer iceServer9 =
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .setTlsCertPolicy(TlsCertPolicy.TLS_CERT_POLICY_INSECURE_NO_CHECK)
+ .setHostname("fakeHostname")
+ .setTlsAlpnProtocols(singletonList("fakeTlsAlpnProtocol"))
+ .setTlsEllipticCurves(singletonList("fakeTlsEllipticCurve2"))
+ .createIceServer();
+
+ assertTrue(iceServer1.equals(iceServer2));
+ assertFalse(iceServer1.equals(iceServer3));
+ assertFalse(iceServer1.equals(iceServer4));
+ assertFalse(iceServer1.equals(iceServer5));
+ assertFalse(iceServer1.equals(iceServer6));
+ assertFalse(iceServer1.equals(iceServer7));
+ assertFalse(iceServer1.equals(iceServer8));
+ assertFalse(iceServer1.equals(iceServer9));
+ }
+
+  // TODO(fischman): More test ideas:
+ // - Test that PC.removeStream() works; requires a second
+ // createOffer/createAnswer dance.
+ // - audit each place that uses `constraints` for specifying non-trivial
+ // constraints (and ensure they're honored).
+ // - test error cases
+ // - ensure reasonable coverage of jni code is achieved. Coverage is
+ // extra-important because of all the free-text (class/method names, etc)
+ // in JNI-style programming; make sure no typos!
+ // - Test that shutdown mid-interaction is crash-free.
+
+ // Tests that the JNI glue between Java and C++ does not crash when creating a PeerConnection.
+ @Test
+ @SmallTest
+ public void testCreationWithConfig() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ List<PeerConnection.IceServer> iceServers = Arrays.asList(
+ PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer(),
+ PeerConnection.IceServer.builder("turn:fake.example.com")
+ .setUsername("fakeUsername")
+ .setPassword("fakePassword")
+ .createIceServer());
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(iceServers);
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ // Test configuration options.
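+    // Note: GATHER_CONTINUALLY keeps ICE candidate gathering running for the lifetime of the
+    // connection instead of stopping once the initial gathering phase completes.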
+ config.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ assertNotNull(offeringPC);
+ }
+
+ @Test
+ @SmallTest
+ public void testCreationWithCertificate() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
+ // Test certificate.
+ RtcCertificatePem originalCert = RtcCertificatePem.generateCertificate();
+ config.certificate = originalCert;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+
+ RtcCertificatePem restoredCert = offeringPC.getCertificate();
+ assertEquals(originalCert.privateKey, restoredCert.privateKey);
+ assertEquals(originalCert.certificate, restoredCert.certificate);
+ }
+
+ @Test
+ @SmallTest
+ public void testCreationWithCryptoOptions() throws Exception {
+ PeerConnectionFactory factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+
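+    // cryptoOptions defaults to null, in which case the native defaults apply.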
+ assertNull(config.cryptoOptions);
+
+ CryptoOptions cryptoOptions = CryptoOptions.builder()
+ .setEnableGcmCryptoSuites(true)
+ .setEnableAes128Sha1_32CryptoCipher(true)
+ .setEnableEncryptedRtpHeaderExtensions(true)
+ .setRequireFrameEncryption(true)
+ .createCryptoOptions();
+ config.cryptoOptions = cryptoOptions;
+
+ PeerConnection offeringPC =
+ factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ assertNotNull(offeringPC);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java
new file mode 100644
index 0000000000..8b1cd67051
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RendererCommonTest.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_FILL;
+import static org.webrtc.RendererCommon.ScalingType.SCALE_ASPECT_FIT;
+import static org.webrtc.RendererCommon.getDisplaySize;
+import static org.webrtc.RendererCommon.getLayoutMatrix;
+
+import android.graphics.Point;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+
+public class RendererCommonTest {
+ @Test
+ @SmallTest
+ public void testDisplaySizeNoFrame() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
+ }
+
+ @Test
+ @SmallTest
+ public void testDisplaySizeDegenerateAspectRatio() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
+ }
+
+ @Test
+ @SmallTest
+ public void testZeroDisplaySize() {
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
+ assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
+ }
+
+ @Test
+ @SmallTest
+ public void testDisplaySizePerfectFit() {
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
+ }
+
+ @Test
+ @SmallTest
+ public void testLandscapeVideoInPortraitDisplay() {
+ assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
+ }
+
+ @Test
+ @SmallTest
+ public void testPortraitVideoInLandscapeDisplay() {
+ assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
+ assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
+ }
+
+ @Test
+ @SmallTest
+ public void testFourToThreeVideoInSixteenToNineDisplay() {
+ assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
+ assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
+ }
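+
+  // Note on SCALE_ASPECT_BALANCED: it is a compromise between FIT and FILL. It may crop the
+  // video, but keeps a minimum fraction of it visible; a minimum visible fraction of 0.5625
+  // (9/16) is consistent with the square 720x720 results asserted above.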
+
+  // Keep only two rounded decimals to make the float comparison robust.
+ private static double[] round(float[] array) {
+ assertEquals(16, array.length);
+ final double[] doubleArray = new double[16];
+ for (int i = 0; i < 16; ++i) {
+ doubleArray[i] = Math.round(100 * array[i]) / 100.0;
+ }
+ return doubleArray;
+ }
+
+ // Brief summary about matrix transformations:
+  // A coordinate p = [u, v, 0, 1] is transformed by matrix m like this: p' = [u', v', 0, 1] = m * p.
+ // OpenGL uses column-major order, so:
+ // u' = u * m[0] + v * m[4] + m[12].
+ // v' = u * m[1] + v * m[5] + m[13].
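+  // For example, the mirror matrix in testLayoutMatrixMirror below has m[0] = -1 and m[12] = 1,
+  // so u' = -u + 1 = 1 - u while v' = v, i.e. a horizontal flip.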
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixDefault() {
+ final float layoutMatrix[] = getLayoutMatrix(false, 1.0f, 1.0f);
+ // Assert:
+ // u' = u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixMirror() {
+ final float layoutMatrix[] = getLayoutMatrix(true, 1.0f, 1.0f);
+ // Assert:
+ // u' = 1 - u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ -1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+
+ @Test
+ @SmallTest
+ public void testLayoutMatrixScale() {
+ // Video has aspect ratio 2, but layout is square. This will cause only the center part of the
+ // video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
+ final float layoutMatrix[] = getLayoutMatrix(false, 2.0f, 1.0f);
+ // Assert:
+ // u' = 0.25 + 0.5 u.
+ // v' = v.
+ // clang-format off
+ assertArrayEquals(new double[] {
+ 0.5, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0.25, 0, 0, 1}, round(layoutMatrix), 0.0);
+ // clang-format on
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java
new file mode 100644
index 0000000000..4127bb2d4f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtcCertificatePemTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Tests for RtcCertificatePem.java. */
+public class RtcCertificatePemTest {
+ @Before
+ public void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testConstructor() {
+ RtcCertificatePem original = RtcCertificatePem.generateCertificate();
+ RtcCertificatePem recreated = new RtcCertificatePem(original.privateKey, original.certificate);
+ assertThat(original.privateKey).isEqualTo(recreated.privateKey);
+ assertThat(original.certificate).isEqualTo(recreated.certificate);
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateDefaults() {
+ RtcCertificatePem rtcCertificate = RtcCertificatePem.generateCertificate();
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomKeyTypeDefaultExpires() {
+ RtcCertificatePem rtcCertificate =
+ RtcCertificatePem.generateCertificate(PeerConnection.KeyType.RSA);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomExpiresDefaultKeyType() {
+ RtcCertificatePem rtcCertificate = RtcCertificatePem.generateCertificate(60 * 60 * 24);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+
+ @Test
+ @SmallTest
+ public void testGenerateCertificateCustomKeyTypeAndExpires() {
+ RtcCertificatePem rtcCertificate =
+ RtcCertificatePem.generateCertificate(PeerConnection.KeyType.RSA, 60 * 60 * 24);
+ assertThat(rtcCertificate.privateKey).isNotEmpty();
+ assertThat(rtcCertificate.certificate).isNotEmpty();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java
new file mode 100644
index 0000000000..9f315d5dc3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpSenderTest.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.Arrays;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.RtpParameters.DegradationPreference;
+
+/** Unit tests for {@link RtpSender}. */
+public class RtpSenderTest {
+ private PeerConnectionFactory factory;
+ private PeerConnection pc;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+    // RtpTransceiver is part of the Unified Plan semantics.
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ pc = factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ }
+
+  /** Test that the enum values for DegradationPreference stay consistent. */
+ @Test
+ @SmallTest
+ public void testSetDegradationPreference() throws Exception {
+ RtpTransceiver transceiver = pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO);
+ RtpSender sender = transceiver.getSender();
+
+ RtpParameters parameters = sender.getParameters();
+ assertNotNull(parameters);
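+    // degradationPreference is unset (null) by default, letting the implementation choose.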
+ assertNull(parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.MAINTAIN_FRAMERATE;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.MAINTAIN_FRAMERATE, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.MAINTAIN_RESOLUTION;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.MAINTAIN_RESOLUTION, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.BALANCED;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.BALANCED, parameters.degradationPreference);
+
+ parameters.degradationPreference = DegradationPreference.DISABLED;
+ assertTrue(sender.setParameters(parameters));
+ parameters = sender.getParameters();
+ assertEquals(DegradationPreference.DISABLED, parameters.degradationPreference);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java
new file mode 100644
index 0000000000..a53ff20f1c
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/RtpTransceiverTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.webrtc.RtpParameters.Encoding;
+import org.webrtc.RtpTransceiver.RtpTransceiverInit;
+
+/** Unit tests for {@link RtpTransceiver}. */
+public class RtpTransceiverTest {
+ private PeerConnectionFactory factory;
+ private PeerConnection pc;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+
+ PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(Arrays.asList());
+    // RtpTransceiver is part of the Unified Plan semantics.
+ config.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
+ pc = factory.createPeerConnection(config, mock(PeerConnection.Observer.class));
+ }
+
+ /** Test that RIDs get set in the RTP sender when passed in through an RtpTransceiverInit. */
+ @Test
+ @SmallTest
+ public void testSetRidInSimulcast() throws Exception {
+ List<Encoding> encodings = new ArrayList<Encoding>();
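+    // Encoding(rid, active, scaleResolutionDownBy): resolution scaling is left unset (null) here.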
+ encodings.add(new Encoding("F", true, null));
+ encodings.add(new Encoding("H", true, null));
+
+ RtpTransceiverInit init = new RtpTransceiverInit(
+ RtpTransceiver.RtpTransceiverDirection.SEND_ONLY, Collections.emptyList(), encodings);
+ RtpTransceiver transceiver =
+ pc.addTransceiver(MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO, init);
+
+ RtpSender sender = transceiver.getSender();
+ RtpParameters parameters = sender.getParameters();
+ List<Encoding> sendEncodings = parameters.getEncodings();
+ assertEquals(2, sendEncodings.size());
+ assertEquals("F", sendEncodings.get(0).getRid());
+ assertEquals("H", sendEncodings.get(1).getRid());
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
new file mode 100644
index 0000000000..9781d03999
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -0,0 +1,518 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.opengl.GLES20;
+import android.os.SystemClock;
+import androidx.annotation.Nullable;
+import androidx.test.filters.MediumTest;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.concurrent.CountDownLatch;
+import org.junit.Before;
+import org.junit.Test;
+
+public class SurfaceTextureHelperTest {
+ /**
+ * Mock texture listener with blocking wait functionality.
+ */
+ public static final class MockTextureListener implements VideoSink {
+ private final Object lock = new Object();
+ private @Nullable VideoFrame.TextureBuffer textureBuffer;
+ // Thread where frames are expected to be received on.
+ private final @Nullable Thread expectedThread;
+
+ MockTextureListener() {
+ this.expectedThread = null;
+ }
+
+ MockTextureListener(Thread expectedThread) {
+ this.expectedThread = expectedThread;
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ if (expectedThread != null && Thread.currentThread() != expectedThread) {
+        throw new IllegalStateException("onFrame() called on the wrong thread.");
+ }
+ synchronized (lock) {
+ this.textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
+ textureBuffer.retain();
+ lock.notifyAll();
+ }
+ }
+
+ /** Wait indefinitely for a new textureBuffer. */
+ public VideoFrame.TextureBuffer waitForTextureBuffer() throws InterruptedException {
+ synchronized (lock) {
+ while (true) {
+ final VideoFrame.TextureBuffer textureBufferToReturn = textureBuffer;
+ if (textureBufferToReturn != null) {
+ textureBuffer = null;
+ return textureBufferToReturn;
+ }
+ lock.wait();
+ }
+ }
+ }
+
+ /** Make sure we get no frame in the specified time period. */
+ public void assertNoFrameIsDelivered(final long waitPeriodMs) throws InterruptedException {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = waitPeriodMs;
+ synchronized (lock) {
+ while (textureBuffer == null && timeRemainingMs > 0) {
+ lock.wait(timeRemainingMs);
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = waitPeriodMs - elapsedTimeMs;
+ }
+ assertTrue(textureBuffer == null);
+ }
+ }
+ }
+
+ /** Assert that two integers are close, with difference at most
+ * {@code threshold}. */
+ public static void assertClose(int threshold, int expected, int actual) {
+ if (Math.abs(expected - actual) <= threshold)
+ return;
+ fail("Not close enough, threshold " + threshold + ". Expected: " + expected + " Actual: "
+ + actual);
+ }
+
+ @Before
+ public void setUp() {
+ // Load the JNI library for textureToYuv.
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ /**
+ * Test normal use by receiving three uniform texture frames. Texture frames are returned as early
+ * as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @Test
+ @MediumTest
+ public void testThreeConstantColorFrames() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+ final GlRectDrawer drawer = new GlRectDrawer();
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglOesBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglOesBase.surfaceWidth(), width);
+ assertEquals(eglOesBase.surfaceHeight(), height);
+
+ final int red[] = new int[] {79, 144, 185};
+ final int green[] = new int[] {66, 210, 162};
+ final int blue[] = new int[] {161, 117, 158};
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+
+ // Wait for an OES texture to arrive and draw it onto the pixel buffer.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ eglBase.makeCurrent();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ width, height, 0, 0, width, height);
+ textureBuffer.release();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
+ // Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+ assertEquals(rgbaData.get() & 0xFF, red[i]);
+ assertEquals(rgbaData.get() & 0xFF, green[i]);
+ assertEquals(rgbaData.get() & 0xFF, blue[i]);
+ assertEquals(rgbaData.get() & 0xFF, 255);
+ }
+ }
+
+ drawer.release();
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ /**
+ * Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
+ * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
+ * buffer and reading it back with glReadPixels().
+ */
+ @Test
+ @MediumTest
+ public void testLateReturnFrame() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createPbufferSurface(width, height);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglOesBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+ final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+ eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglOesBase.surfaceWidth(), width);
+ assertEquals(eglOesBase.surfaceHeight(), height);
+
+ final int red = 79;
+ final int green = 66;
+ final int blue = 161;
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglOesBase.makeCurrent();
+ GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglOesBase.swapBuffers();
+ eglOesBase.release();
+
+ // Wait for OES texture frame.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+    // Disconnect while holding the frame.
+ surfaceTextureHelper.dispose();
+
+ // Draw the pending texture frame onto the pixel buffer.
+ eglBase.makeCurrent();
+ final GlRectDrawer drawer = new GlRectDrawer();
+ drawer.drawOes(textureBuffer.getTextureId(),
+ RendererCommon.convertMatrixFromAndroidGraphicsMatrix(textureBuffer.getTransformMatrix()),
+ width, height, 0, 0, width, height);
+ drawer.release();
+
+ // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+ final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+ GlUtil.checkNoGLES2Error("glReadPixels");
+ eglBase.release();
+
+ // Assert rendered image is expected constant color.
+ while (rgbaData.hasRemaining()) {
+ assertEquals(rgbaData.get() & 0xFF, red);
+ assertEquals(rgbaData.get() & 0xFF, green);
+ assertEquals(rgbaData.get() & 0xFF, blue);
+ assertEquals(rgbaData.get() & 0xFF, 255);
+ }
+ // Late frame return after everything has been disposed and released.
+ textureBuffer.release();
+ }
+
+ /**
+ * Test disposing the SurfaceTextureHelper, but keep trying to produce more texture frames. No
+ * frames should be delivered to the listener.
+ */
+ @Test
+ @MediumTest
+ public void testDispose() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener.waitForTextureBuffer().release();
+
+ // Dispose - we should not receive any textures after this.
+ surfaceTextureHelper.dispose();
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+    // swapBuffers() should not trigger onTextureFrameAvailable() because dispose() has been called.
+ // Assert that no OES texture was delivered.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 500);
+
+ eglBase.release();
+ }
+
+ /**
+   * Test disposing the SurfaceTextureHelper immediately after it has been set up. Disposal
+   * should succeed without any listener attached or frames produced.
+ */
+ @Test
+ @SmallTest
+ public void testDisposeImmediately() {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+ * Call stopListening(), but keep trying to produce more texture frames. No frames should be
+ * delivered to the listener.
+ */
+ @Test
+ @MediumTest
+ public void testStopListening() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener.waitForTextureBuffer().release();
+
+ // Stop listening - we should not receive any textures after this.
+ surfaceTextureHelper.stopListening();
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+    // swapBuffers() should not trigger onTextureFrameAvailable() because stopListening() was called.
+ // Assert that no OES texture was delivered.
+ listener.assertNoFrameIsDelivered(/* waitPeriodMs= */ 500);
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ /**
+   * Test stopListening() immediately after the SurfaceTextureHelper has been set up.
+ */
+ @Test
+ @SmallTest
+ public void testStopListeningImmediately() throws InterruptedException {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+   * Test calling stopListening() from the helper's handler thread immediately after the
+   * SurfaceTextureHelper has been set up.
+ */
+ @Test
+ @SmallTest
+ public void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ final MockTextureListener listener = new MockTextureListener();
+
+ final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
+ final CountDownLatch stopListeningBarrierDone = new CountDownLatch(1);
+ // Start by posting to the handler thread to keep it occupied.
+ surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ ThreadUtils.awaitUninterruptibly(stopListeningBarrier);
+ surfaceTextureHelper.stopListening();
+ stopListeningBarrierDone.countDown();
+ }
+ });
+
+ // startListening() is asynchronous and will post to the occupied handler thread.
+ surfaceTextureHelper.startListening(listener);
+ // Wait for stopListening() to be called on the handler thread.
+ stopListeningBarrier.countDown();
+ stopListeningBarrierDone.await();
+ // Wait until handler thread is idle to try to catch late startListening() call.
+ final CountDownLatch barrier = new CountDownLatch(1);
+ surfaceTextureHelper.getHandler().post(new Runnable() {
+ @Override
+ public void run() {
+ barrier.countDown();
+ }
+ });
+ ThreadUtils.awaitUninterruptibly(barrier);
+ // Previous startListening() call should never have taken place and it should be ok to call it
+ // again.
+ surfaceTextureHelper.startListening(listener);
+
+ surfaceTextureHelper.dispose();
+ }
+
+ /**
+ * Test calling startListening() with a new listener after stopListening() has been called.
+ */
+ @Test
+ @MediumTest
+ public void testRestartListeningWithNewListener() throws InterruptedException {
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
+ surfaceTextureHelper.setTextureSize(/* textureWidth= */ 32, /* textureHeight= */ 32);
+ final MockTextureListener listener1 = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener1);
+ // Create EglBase with the SurfaceTexture as target EGLSurface.
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ // Assert no frame has been received yet.
+ listener1.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+ // Draw and wait for one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+ listener1.waitForTextureBuffer().release();
+
+ // Stop listening - `listener1` should not receive any textures after this.
+ surfaceTextureHelper.stopListening();
+
+ // Connect different listener.
+ final MockTextureListener listener2 = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener2);
+ // Assert no frame has been received yet.
+ listener2.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+
+ // Draw one frame.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+
+ // Check that `listener2` received the frame, and not `listener1`.
+ listener2.waitForTextureBuffer().release();
+ listener1.assertNoFrameIsDelivered(/* waitPeriodMs= */ 1);
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+
+ @Test
+ @MediumTest
+  public void testTextureToYuv() throws InterruptedException {
+ final int width = 16;
+ final int height = 16;
+
+ final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+ // Create SurfaceTextureHelper and listener.
+ final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
+ "SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
+ final MockTextureListener listener = new MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ // Create resources for stubbing an OES texture producer. `eglBase` has the SurfaceTexture in
+ // `surfaceTextureHelper` as the target EGLSurface.
+
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ assertEquals(eglBase.surfaceWidth(), width);
+ assertEquals(eglBase.surfaceHeight(), height);
+
+ final int red[] = new int[] {79, 144, 185};
+ final int green[] = new int[] {66, 210, 162};
+ final int blue[] = new int[] {161, 117, 158};
+
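+    // These reference values are consistent with a limited-range BT.601 RGB-to-YUV conversion;
+    // e.g. for the first frame, Y = 16 + (65.481 * 79 + 128.553 * 66 + 24.966 * 161) / 255 ~= 85.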
+ final int ref_y[] = new int[] {85, 170, 161};
+ final int ref_u[] = new int[] {168, 97, 123};
+ final int ref_v[] = new int[] {127, 106, 138};
+
+ // Draw three frames.
+ for (int i = 0; i < 3; ++i) {
+ // Draw a constant color frame onto the SurfaceTexture.
+ eglBase.makeCurrent();
+ GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+ eglBase.swapBuffers();
+
+ // Wait for an OES texture to arrive.
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ final VideoFrame.I420Buffer i420 = textureBuffer.toI420();
+ textureBuffer.release();
+
+ // Memory layout: Lines are 16 bytes. First 16 lines are
+ // the Y data. These are followed by 8 lines with 8 bytes of U
+ // data on the left and 8 bytes of V data on the right.
+ //
+ // Offset
+ // 0 YYYYYYYY YYYYYYYY
+ // 16 YYYYYYYY YYYYYYYY
+ // ...
+ // 240 YYYYYYYY YYYYYYYY
+ // 256 UUUUUUUU VVVVVVVV
+ // 272 UUUUUUUU VVVVVVVV
+ // ...
+ // 368 UUUUUUUU VVVVVVVV
+ // 384 buffer end
+
+ // Allow off-by-one differences due to different rounding.
+ final ByteBuffer dataY = i420.getDataY();
+ final int strideY = i420.getStrideY();
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ assertClose(1, ref_y[i], dataY.get(y * strideY + x) & 0xFF);
+ }
+ }
+
+ final int chromaWidth = width / 2;
+ final int chromaHeight = height / 2;
+
+ final ByteBuffer dataU = i420.getDataU();
+ final ByteBuffer dataV = i420.getDataV();
+ final int strideU = i420.getStrideU();
+ final int strideV = i420.getStrideV();
+ for (int y = 0; y < chromaHeight; y++) {
+ for (int x = 0; x < chromaWidth; x++) {
+ assertClose(1, ref_u[i], dataU.get(y * strideU + x) & 0xFF);
+ assertClose(1, ref_v[i], dataV.get(y * strideV + x) & 0xFF);
+ }
+ }
+ i420.release();
+ }
+
+ surfaceTextureHelper.dispose();
+ eglBase.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
new file mode 100644
index 0000000000..4d499789e6
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.support.test.InstrumentationRegistry;
+import android.support.test.annotation.UiThreadTest;
+import android.support.test.rule.UiThreadTestRule;
+import android.view.View.MeasureSpec;
+import androidx.test.filters.MediumTest;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class SurfaceViewRendererOnMeasureTest {
+ @Rule public final UiThreadTestRule uiThreadRule = new UiThreadTestRule();
+
+ /**
+ * List with all possible scaling types.
+ */
+ private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
+ RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+ RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
+
+ /**
+ * List with MeasureSpec modes.
+ */
+ private static final List<Integer> measureSpecModes =
+ Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
+
+ /**
+ * Returns a dummy YUV frame.
+ */
+ static VideoFrame createFrame(int width, int height, int rotationDegree) {
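+    // I420 layout: a full-resolution Y plane plus quarter-resolution U and V planes. Chroma
+    // strides and heights round up via (x + 1) / 2 so that odd dimensions are handled correctly.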
+ final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
+ final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
+ final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ for (int i = 0; i < 3; ++i) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
+ }
+ final VideoFrame.I420Buffer buffer =
+ JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1], yuvStrides[1],
+ yuvPlanes[2], yuvStrides[2], null /* releaseCallback */);
+ return new VideoFrame(buffer, rotationDegree, 0 /* timestamp */);
+ }
+
+ /**
+ * Assert onMeasure() with given parameters will result in expected measured size.
+ */
+ @SuppressLint("WrongCall")
+ private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
+ RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
+ int expectedHeight, int widthSpec, int heightSpec) {
+ surfaceViewRenderer.setScalingType(scalingType);
+ surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
+ final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
+ final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
+ if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
+ fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+ + ")"
+ + " with scaling type " + scalingType + " and frame: " + frameDimensions
+ + " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+ + measuredWidth + "x" + measuredHeight);
+ }
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
+ */
+ @Test
+ @UiThreadTest
+ @MediumTest
+ public void testNoFrame() {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(InstrumentationRegistry.getContext());
+ final String frameDimensions = "null";
+
+ // Test behaviour before SurfaceViewRenderer.init() is called.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+ zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
+ surfaceViewRenderer.init((EglBase.Context) null, null);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
+ zeroMeasureSize);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
+ MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+
+ /**
+ * Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
+ */
+ @Test
+ @UiThreadTest
+ @MediumTest
+ public void testFrame1280x720() throws InterruptedException {
+ final SurfaceViewRenderer surfaceViewRenderer =
+ new SurfaceViewRenderer(InstrumentationRegistry.getContext());
+ /**
+ * Mock renderer events with blocking wait functionality for frame size changes.
+ */
+ class MockRendererEvents implements RendererCommon.RendererEvents {
+ private int frameWidth;
+ private int frameHeight;
+ private int rotation;
+
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void waitForFrameSize(int frameWidth, int frameHeight, int rotation)
+ throws InterruptedException {
+ while (this.frameWidth != frameWidth || this.frameHeight != frameHeight
+ || this.rotation != rotation) {
+ wait();
+ }
+ }
+
+ @Override
+ public void onFirstFrameRendered() {}
+
+ @Override
+ // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+ @SuppressWarnings("NoSynchronizedMethodCheck")
+ public synchronized void onFrameResolutionChanged(
+ int frameWidth, int frameHeight, int rotation) {
+ this.frameWidth = frameWidth;
+ this.frameHeight = frameHeight;
+ this.rotation = rotation;
+ notifyAll();
+ }
+ }
+ final MockRendererEvents rendererEvents = new MockRendererEvents();
+ surfaceViewRenderer.init((EglBase.Context) null, rendererEvents);
+
+    // Test different rotation degrees, but the same rotated size.
+ for (int rotationDegree : new int[] {0, 90, 180, 270}) {
+ final int rotatedWidth = 1280;
+ final int rotatedHeight = 720;
+ final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
+ final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
+ final VideoFrame frame = createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
+ assertEquals(rotatedWidth, frame.getRotatedWidth());
+ assertEquals(rotatedHeight, frame.getRotatedHeight());
+ final String frameDimensions =
+ unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
+ surfaceViewRenderer.onFrame(frame);
+ frame.release();
+ rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
+
+ // Test forcing to zero size.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
+ zeroMeasureSize, zeroMeasureSize);
+ }
+ }
+
+ // Test perfect fit.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ for (int measureSpecMode : measureSpecModes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
+ rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+ MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
+ }
+ }
+
+ // Force spec size with different aspect ratio than frame aspect ratio.
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
+ MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
+ MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
+ }
+
+ final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
+ {
+ // Relax both width and height constraints.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+ expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force width to 720, but relax height constraint. This will give the same result as
+ // above, because width is already the limiting factor and will be maxed out.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ final Point expectedSize =
+ RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+ assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
+ expectedSize.y, widthSpec, heightSpec);
+ }
+ }
+ {
+ // Force height, but relax width constraint. This will force a bad layout size.
+ final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+ final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
+ for (RendererCommon.ScalingType scalingType : scalingTypes) {
+ assertMeasuredSize(
+ surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
+ }
+ }
+ }
+
+ surfaceViewRenderer.release();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java
new file mode 100644
index 0000000000..6c7904c9f3
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TestConstants.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+class TestConstants {
+ public static final String NATIVE_LIBRARY = "jingle_peerconnection_instrumentationtests_so";
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java
new file mode 100644
index 0000000000..46cb37e5f1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/TimestampAlignerTest.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import androidx.test.filters.SmallTest;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TimestampAlignerTest {
+ @BeforeClass
+ public static void setUp() {
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testGetRtcTimeNanos() {
+ TimestampAligner.getRtcTimeNanos();
+ }
+
+ @Test
+ @SmallTest
+ public void testDispose() {
+ final TimestampAligner timestampAligner = new TimestampAligner();
+ timestampAligner.dispose();
+ }
+
+ @Test
+ @SmallTest
+ public void testTranslateTimestamp() {
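+    // Smoke test: pass an arbitrary camera timestamp through translateTimestamp() and verify the
+    // native call completes; the translated value itself is not checked.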
+ final TimestampAligner timestampAligner = new TimestampAligner();
+ timestampAligner.translateTimestamp(/* cameraTimeNs= */ 123);
+ timestampAligner.dispose();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java
new file mode 100644
index 0000000000..9c66edd8ef
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFileRendererTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+
+import android.os.Environment;
+import androidx.test.filters.SmallTest;
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import org.junit.Before;
+import org.junit.Test;
+
+public class VideoFileRendererTest {
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @Test
+ @SmallTest
+ public void testYuvRenderingToFile() throws InterruptedException, IOException {
+ EglBase eglBase = EglBase.create();
+ final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ + "/chromium_tests_root/testvideoout.y4m";
+ int frameWidth = 4;
+ int frameHeight = 4;
+ VideoFileRenderer videoFileRenderer =
+ new VideoFileRenderer(videoOutPath, frameWidth, frameHeight, eglBase.getEglBaseContext());
+
+ String[] frames = {
+ "THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
+
+ for (String frameStr : frames) {
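+      // For a 4x4 I420 frame: 16 Y bytes + 4 U bytes + 4 V bytes = 24 bytes, exactly the length
+      // of each 24-character ASCII test string above.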
+ int[] planeSizes = {
+          frameWidth * frameHeight, frameWidth * frameHeight / 4, frameWidth * frameHeight / 4};
+ int[] yuvStrides = {frameWidth, frameWidth / 2, frameWidth / 2};
+
+ ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+ byte[] frameBytes = frameStr.getBytes(Charset.forName("US-ASCII"));
+ int pos = 0;
+ for (int i = 0; i < 3; i++) {
+ yuvPlanes[i] = ByteBuffer.allocateDirect(planeSizes[i]);
+ yuvPlanes[i].put(frameBytes, pos, planeSizes[i]);
+ yuvPlanes[i].rewind();
+ pos += planeSizes[i];
+ }
+
+ VideoFrame.I420Buffer buffer =
+ JavaI420Buffer.wrap(frameWidth, frameHeight, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
+              yuvStrides[1], yuvPlanes[2], yuvStrides[2], /* releaseCallback= */ null);
+
+      VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
+ videoFileRenderer.onFrame(frame);
+ frame.release();
+ }
+    videoFileRenderer.release();
+    eglBase.release();
+
+ RandomAccessFile writtenFile = new RandomAccessFile(videoOutPath, "r");
+ try {
+ int length = (int) writtenFile.length();
+ byte[] data = new byte[length];
+ writtenFile.readFully(data);
+ String fileContent = new String(data, Charset.forName("US-ASCII"));
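+      // The Y4M header decodes as: C420 chroma subsampling, 4x4 frame size (W4 H4), progressive
+      // scan (Ip), 30 fps (F30:1), 1:1 pixel aspect ratio (A1:1). Each FRAME marker is followed
+      // by the raw I420 planes, which here are our 24-byte text strings.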
+ String expected = "YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1\n"
+ + "FRAME\n"
+ + "THIS IS JUST SOME TEXT xFRAME\n"
+ + "THE SECOND FRAME qwerty.FRAME\n"
+ + "HERE IS THE THRID FRAME!";
+ assertEquals(expected, fileContent);
+ } finally {
+ writtenFile.close();
+ }
+
+ new File(videoOutPath).delete();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
new file mode 100644
index 0000000000..3668cd71b1
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java
@@ -0,0 +1,530 @@
+/*
+ * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+import android.graphics.Matrix;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Test VideoFrame buffers of different formats (I420, RGB, OES, NV12, NV21) and verify
+ * toI420() and cropAndScale() behavior. Creating RGB/OES textures involves VideoFrameDrawer and
+ * GlRectDrawer, so we are testing the full chain I420 -> OES/RGB texture -> I420, with and without
+ * cropping in the middle. Reading textures back to I420 also exercises the YuvConverter code.
+ */
+@RunWith(Parameterized.class)
+public class VideoFrameBufferTest {
+  /**
+   * These tests are parameterized on this enum, which represents the different VideoFrame.Buffer
+   * types.
+   */
+  private enum BufferType { I420_JAVA, I420_NATIVE, RGB_TEXTURE, OES_TEXTURE, NV21, NV12 }
+
+ @Parameters(name = "{0}")
+ public static Collection<BufferType> parameters() {
+ return Arrays.asList(BufferType.values());
+ }
+
+ @BeforeClass
+ public static void setUp() {
+ // Needed for JniCommon.nativeAllocateByteBuffer() to work, which is used from JavaI420Buffer.
+ System.loadLibrary(TestConstants.NATIVE_LIBRARY);
+ }
+
+ private final BufferType bufferType;
+
+ public VideoFrameBufferTest(BufferType bufferType) {
+ this.bufferType = bufferType;
+ }
+
+ /**
+ * Create a VideoFrame.Buffer of the given type with the same pixel content as the given I420
+ * buffer.
+ */
+ private static VideoFrame.Buffer createBufferWithType(
+ BufferType bufferType, VideoFrame.I420Buffer i420Buffer) {
+ VideoFrame.Buffer buffer;
+ switch (bufferType) {
+ case I420_JAVA:
+ buffer = i420Buffer;
+ buffer.retain();
+ assertEquals(VideoFrameBufferType.I420, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(buffer));
+ return buffer;
+ case I420_NATIVE:
+ buffer = nativeGetNativeI420Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.I420, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(buffer));
+ return buffer;
+ case RGB_TEXTURE:
+ buffer = createRgbTextureBuffer(/* eglContext= */ null, i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case OES_TEXTURE:
+ buffer = createOesTextureBuffer(/* eglContext= */ null, i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case NV21:
+ buffer = createNV21Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ case NV12:
+ buffer = createNV12Buffer(i420Buffer);
+ assertEquals(VideoFrameBufferType.NATIVE, buffer.getBufferType());
+ assertEquals(VideoFrameBufferType.NATIVE, nativeGetBufferType(buffer));
+ return buffer;
+ default:
+ throw new IllegalArgumentException("Unknown buffer type: " + bufferType);
+ }
+ }
+
+ private VideoFrame.Buffer createBufferToTest(VideoFrame.I420Buffer i420Buffer) {
+ return createBufferWithType(this.bufferType, i420Buffer);
+ }
+
+ /**
+ * Creates a 16x16 I420 buffer that varies smoothly and spans all RGB values.
+ */
+ public static VideoFrame.I420Buffer createTestI420Buffer() {
+ final int width = 16;
+ final int height = 16;
+ final int[] yData = new int[] {156, 162, 167, 172, 177, 182, 187, 193, 199, 203, 209, 214, 219,
+ 224, 229, 235, 147, 152, 157, 162, 168, 173, 178, 183, 188, 193, 199, 205, 210, 215, 220,
+ 225, 138, 143, 148, 153, 158, 163, 168, 174, 180, 184, 190, 195, 200, 205, 211, 216, 128,
+ 133, 138, 144, 149, 154, 159, 165, 170, 175, 181, 186, 191, 196, 201, 206, 119, 124, 129,
+ 134, 140, 145, 150, 156, 161, 166, 171, 176, 181, 187, 192, 197, 109, 114, 119, 126, 130,
+ 136, 141, 146, 151, 156, 162, 167, 172, 177, 182, 187, 101, 105, 111, 116, 121, 126, 132,
+ 137, 142, 147, 152, 157, 162, 168, 173, 178, 90, 96, 101, 107, 112, 117, 122, 127, 132, 138,
+ 143, 148, 153, 158, 163, 168, 82, 87, 92, 97, 102, 107, 113, 118, 123, 128, 133, 138, 144,
+ 149, 154, 159, 72, 77, 83, 88, 93, 98, 103, 108, 113, 119, 124, 129, 134, 139, 144, 150, 63,
+ 68, 73, 78, 83, 89, 94, 99, 104, 109, 114, 119, 125, 130, 135, 140, 53, 58, 64, 69, 74, 79,
+ 84, 89, 95, 100, 105, 110, 115, 120, 126, 131, 44, 49, 54, 59, 64, 70, 75, 80, 85, 90, 95,
+ 101, 106, 111, 116, 121, 34, 40, 45, 50, 55, 60, 65, 71, 76, 81, 86, 91, 96, 101, 107, 113,
+ 25, 30, 35, 40, 46, 51, 56, 61, 66, 71, 77, 82, 87, 92, 98, 103, 16, 21, 26, 31, 36, 41, 46,
+ 52, 57, 62, 67, 72, 77, 83, 89, 94};
+ final int[] uData = new int[] {110, 113, 116, 118, 120, 123, 125, 128, 113, 116, 118, 120, 123,
+ 125, 128, 130, 116, 118, 120, 123, 125, 128, 130, 132, 118, 120, 123, 125, 128, 130, 132,
+ 135, 120, 123, 125, 128, 130, 132, 135, 138, 123, 125, 128, 130, 132, 135, 138, 139, 125,
+ 128, 130, 132, 135, 138, 139, 142, 128, 130, 132, 135, 138, 139, 142, 145};
+ final int[] vData = new int[] {31, 45, 59, 73, 87, 100, 114, 127, 45, 59, 73, 87, 100, 114, 128,
+ 141, 59, 73, 87, 100, 114, 127, 141, 155, 73, 87, 100, 114, 127, 141, 155, 168, 87, 100,
+ 114, 128, 141, 155, 168, 182, 100, 114, 128, 141, 155, 168, 182, 197, 114, 127, 141, 155,
+ 168, 182, 196, 210, 127, 141, 155, 168, 182, 196, 210, 224};
+ return JavaI420Buffer.wrap(width, height, toByteBuffer(yData),
+ /* strideY= */ width, toByteBuffer(uData), /* strideU= */ width / 2, toByteBuffer(vData),
+ /* strideV= */ width / 2,
+ /* releaseCallback= */ null);
+ }
+
+ /**
+ * Create an RGB texture buffer available in `eglContext` with the same pixel content as the given
+ * I420 buffer.
+ */
+ public static VideoFrame.TextureBuffer createRgbTextureBuffer(
+ EglBase.Context eglContext, VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+
+ final HandlerThread renderThread = new HandlerThread("RGB texture thread");
+ renderThread.start();
+ final Handler renderThreadHandler = new Handler(renderThread.getLooper());
+ return ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // Create EGL base with a pixel buffer as display output.
+ final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PIXEL_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ final GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
+ textureFrameBuffer.setSize(width, height);
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
+ drawI420Buffer(i420Buffer);
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+
+ final YuvConverter yuvConverter = new YuvConverter();
+ return new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.RGB,
+ textureFrameBuffer.getTextureId(),
+ /* transformMatrix= */ new Matrix(), renderThreadHandler, yuvConverter,
+ /* releaseCallback= */ () -> renderThreadHandler.post(() -> {
+ textureFrameBuffer.release();
+ yuvConverter.release();
+ eglBase.release();
+ renderThread.quit();
+ }));
+ });
+ }
+
+ /**
+ * Create an OES texture buffer available in `eglContext` with the same pixel content as the given
+ * I420 buffer.
+ */
+ public static VideoFrame.TextureBuffer createOesTextureBuffer(
+ EglBase.Context eglContext, VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+
+ // Create resources for generating OES textures.
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create("SurfaceTextureHelper test", eglContext);
+ surfaceTextureHelper.setTextureSize(width, height);
+
+ final HandlerThread renderThread = new HandlerThread("OES texture thread");
+ renderThread.start();
+ final Handler renderThreadHandler = new Handler(renderThread.getLooper());
+ final VideoFrame.TextureBuffer oesBuffer =
+ ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+ // Create EGL base with the SurfaceTexture as display output.
+ final EglBase eglBase = EglBase.create(eglContext, EglBase.CONFIG_PLAIN);
+ eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+ eglBase.makeCurrent();
+ assertEquals(width, eglBase.surfaceWidth());
+ assertEquals(height, eglBase.surfaceHeight());
+
+ final SurfaceTextureHelperTest.MockTextureListener listener =
+ new SurfaceTextureHelperTest.MockTextureListener();
+ surfaceTextureHelper.startListening(listener);
+
+ // Draw the frame and block until an OES texture is delivered.
+ drawI420Buffer(i420Buffer);
+ eglBase.swapBuffers();
+ final VideoFrame.TextureBuffer textureBuffer = listener.waitForTextureBuffer();
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper.dispose();
+
+ return textureBuffer;
+ });
+ renderThread.quit();
+
+ return oesBuffer;
+ }
+
+ /** Create an NV21Buffer with the same pixel content as the given I420 buffer. */
+ public static NV21Buffer createNV21Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+ final int chromaStride = width;
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final int ySize = width * height;
+
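+    // NV21 layout: a full-resolution Y plane followed by a single interleaved chroma plane
+    // ordered V, U, V, U, ... (the reverse of NV12; see createNV12Buffer below).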
+ final ByteBuffer nv21Buffer = ByteBuffer.allocateDirect(ySize + chromaStride * chromaHeight);
+    // Grab the backing array of the direct buffer; a freshly allocated buffer has array offset 0,
+    // so we can index the array directly.
+    @SuppressWarnings("ByteBufferBackingArray")
+    final byte[] nv21Data = nv21Buffer.array();
+
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final byte yValue = i420Buffer.getDataY().get(y * i420Buffer.getStrideY() + x);
+ nv21Data[y * width + x] = yValue;
+ }
+ }
+ for (int y = 0; y < chromaHeight; ++y) {
+ for (int x = 0; x < chromaWidth; ++x) {
+ final byte uValue = i420Buffer.getDataU().get(y * i420Buffer.getStrideU() + x);
+ final byte vValue = i420Buffer.getDataV().get(y * i420Buffer.getStrideV() + x);
+ nv21Data[ySize + y * chromaStride + 2 * x + 0] = vValue;
+ nv21Data[ySize + y * chromaStride + 2 * x + 1] = uValue;
+ }
+ }
+ return new NV21Buffer(nv21Data, width, height, /* releaseCallback= */ null);
+ }
+
+ /** Create an NV12Buffer with the same pixel content as the given I420 buffer. */
+ public static NV12Buffer createNV12Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final int width = i420Buffer.getWidth();
+ final int height = i420Buffer.getHeight();
+ final int chromaStride = width;
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final int ySize = width * height;
+
+ final ByteBuffer nv12Buffer = ByteBuffer.allocateDirect(ySize + chromaStride * chromaHeight);
+
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final byte yValue = i420Buffer.getDataY().get(y * i420Buffer.getStrideY() + x);
+ nv12Buffer.put(y * width + x, yValue);
+ }
+ }
+ for (int y = 0; y < chromaHeight; ++y) {
+ for (int x = 0; x < chromaWidth; ++x) {
+ final byte uValue = i420Buffer.getDataU().get(y * i420Buffer.getStrideU() + x);
+ final byte vValue = i420Buffer.getDataV().get(y * i420Buffer.getStrideV() + x);
+ nv12Buffer.put(ySize + y * chromaStride + 2 * x + 0, uValue);
+ nv12Buffer.put(ySize + y * chromaStride + 2 * x + 1, vValue);
+ }
+ }
+ return new NV12Buffer(width, height, /* stride= */ width, /* sliceHeight= */ height, nv12Buffer,
+        /* releaseCallback= */ null);
+ }
+
+ /** Print the ByteBuffer plane to the StringBuilder. */
+ private static void printPlane(
+ StringBuilder stringBuilder, int width, int height, ByteBuffer plane, int stride) {
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int value = plane.get(y * stride + x) & 0xFF;
+ if (x != 0) {
+ stringBuilder.append(", ");
+ }
+ stringBuilder.append(value);
+ }
+ stringBuilder.append("\n");
+ }
+ }
+
+ /** Convert the pixel content of an I420 buffer to a string representation. */
+ private static String i420BufferToString(VideoFrame.I420Buffer buffer) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ stringBuilder.append(
+ "I420 buffer with size: " + buffer.getWidth() + "x" + buffer.getHeight() + ".\n");
+ stringBuilder.append("Y-plane:\n");
+ printPlane(stringBuilder, buffer.getWidth(), buffer.getHeight(), buffer.getDataY(),
+ buffer.getStrideY());
+ final int chromaWidth = (buffer.getWidth() + 1) / 2;
+ final int chromaHeight = (buffer.getHeight() + 1) / 2;
+ stringBuilder.append("U-plane:\n");
+ printPlane(stringBuilder, chromaWidth, chromaHeight, buffer.getDataU(), buffer.getStrideU());
+ stringBuilder.append("V-plane:\n");
+ printPlane(stringBuilder, chromaWidth, chromaHeight, buffer.getDataV(), buffer.getStrideV());
+ return stringBuilder.toString();
+ }
+
+ /**
+ * Assert that the given I420 buffers are almost identical, allowing for some difference due to
+ * numerical errors. It has limits for both overall PSNR and maximum individual pixel difference.
+ */
+ public static void assertAlmostEqualI420Buffers(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ final int diff = maxDiff(bufferA, bufferB);
+ assertThat("Pixel difference too high: " + diff + "."
+ + "\nBuffer A: " + i420BufferToString(bufferA)
+ + "Buffer B: " + i420BufferToString(bufferB),
+ diff, lessThanOrEqualTo(4));
+ final double psnr = calculatePsnr(bufferA, bufferB);
+ assertThat("PSNR too low: " + psnr + "."
+ + "\nBuffer A: " + i420BufferToString(bufferA)
+ + "Buffer B: " + i420BufferToString(bufferB),
+ psnr, greaterThanOrEqualTo(50.0));
+ }
+
+ /** Returns a flattened list of pixel differences for two ByteBuffer planes. */
+ private static List<Integer> getPixelDiffs(
+ int width, int height, ByteBuffer planeA, int strideA, ByteBuffer planeB, int strideB) {
+ List<Integer> res = new ArrayList<>();
+ for (int y = 0; y < height; ++y) {
+ for (int x = 0; x < width; ++x) {
+ final int valueA = planeA.get(y * strideA + x) & 0xFF;
+ final int valueB = planeB.get(y * strideB + x) & 0xFF;
+ res.add(Math.abs(valueA - valueB));
+ }
+ }
+ return res;
+ }
+
+ /** Returns a flattened list of pixel differences for two I420 buffers. */
+ private static List<Integer> getPixelDiffs(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ assertEquals(bufferA.getWidth(), bufferB.getWidth());
+ assertEquals(bufferA.getHeight(), bufferB.getHeight());
+ final int width = bufferA.getWidth();
+ final int height = bufferA.getHeight();
+ final int chromaWidth = (width + 1) / 2;
+ final int chromaHeight = (height + 1) / 2;
+ final List<Integer> diffs = getPixelDiffs(width, height, bufferA.getDataY(),
+ bufferA.getStrideY(), bufferB.getDataY(), bufferB.getStrideY());
+ diffs.addAll(getPixelDiffs(chromaWidth, chromaHeight, bufferA.getDataU(), bufferA.getStrideU(),
+ bufferB.getDataU(), bufferB.getStrideU()));
+ diffs.addAll(getPixelDiffs(chromaWidth, chromaHeight, bufferA.getDataV(), bufferA.getStrideV(),
+ bufferB.getDataV(), bufferB.getStrideV()));
+ return diffs;
+ }
+
+ /** Returns the maximum pixel difference from any of the Y/U/V planes in the given buffers. */
+ private static int maxDiff(VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ return Collections.max(getPixelDiffs(bufferA, bufferB));
+ }
+
+ /**
+ * Returns the PSNR given a sum of squared error and the number of measurements that were added.
+ */
+ private static double sseToPsnr(long sse, int count) {
+ if (sse == 0) {
+ return Double.POSITIVE_INFINITY;
+ }
+ final double meanSquaredError = (double) sse / (double) count;
+ final double maxPixelValue = 255.0;
+ return 10.0 * Math.log10(maxPixelValue * maxPixelValue / meanSquaredError);
+ }
+
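+  // Worked example: sse = 68 over count = 384 samples (a 16x16 Y plane plus two 8x8 chroma
+  // planes) gives MSE ~0.18, so PSNR = 10 * log10(255^2 / 0.18) ~55.6 dB, above the 50 dB
+  // limit used by assertAlmostEqualI420Buffers().
+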
+ /** Returns the PSNR of the given I420 buffers. */
+ private static double calculatePsnr(
+ VideoFrame.I420Buffer bufferA, VideoFrame.I420Buffer bufferB) {
+ final List<Integer> pixelDiffs = getPixelDiffs(bufferA, bufferB);
+ long sse = 0;
+ for (int pixelDiff : pixelDiffs) {
+ sse += pixelDiff * pixelDiff;
+ }
+ return sseToPsnr(sse, pixelDiffs.size());
+ }
+
+ /**
+ * Convert an int array to a byte array and make sure the values are within the range [0, 255].
+ */
+ private static byte[] toByteArray(int[] array) {
+ final byte[] res = new byte[array.length];
+ for (int i = 0; i < array.length; ++i) {
+ final int value = array[i];
+ assertThat(value, greaterThanOrEqualTo(0));
+ assertThat(value, lessThanOrEqualTo(255));
+ res[i] = (byte) value;
+ }
+ return res;
+ }
+
+ /** Convert a byte array to a direct ByteBuffer. */
+ private static ByteBuffer toByteBuffer(int[] array) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(array.length);
+ buffer.put(toByteArray(array));
+ buffer.rewind();
+ return buffer;
+ }
+
+ /**
+ * Draw an I420 buffer on the currently bound frame buffer, allocating and releasing any
+ * resources necessary.
+ */
+ private static void drawI420Buffer(VideoFrame.I420Buffer i420Buffer) {
+ final GlRectDrawer drawer = new GlRectDrawer();
+ final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+ videoFrameDrawer.drawFrame(
+ new VideoFrame(i420Buffer, /* rotation= */ 0, /* timestampNs= */ 0), drawer);
+ videoFrameDrawer.release();
+ drawer.release();
+ }
+
+ /**
+ * Helper function that tests cropAndScale() with the given cropping and scaling parameters, and
+ * compares the pixel content against a reference I420 buffer.
+ */
+ private void testCropAndScale(
+ int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+ final VideoFrame.I420Buffer referenceI420Buffer = createTestI420Buffer();
+ final VideoFrame.Buffer bufferToTest = createBufferToTest(referenceI420Buffer);
+
+ final VideoFrame.Buffer croppedReferenceBuffer = referenceI420Buffer.cropAndScale(
+ cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ referenceI420Buffer.release();
+ final VideoFrame.I420Buffer croppedReferenceI420Buffer = croppedReferenceBuffer.toI420();
+ croppedReferenceBuffer.release();
+
+ final VideoFrame.Buffer croppedBufferToTest =
+ bufferToTest.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+ bufferToTest.release();
+
+ final VideoFrame.I420Buffer croppedOutputI420Buffer = croppedBufferToTest.toI420();
+ croppedBufferToTest.release();
+
+ assertAlmostEqualI420Buffers(croppedReferenceI420Buffer, croppedOutputI420Buffer);
+ croppedReferenceI420Buffer.release();
+ croppedOutputI420Buffer.release();
+ }
+
+  /** Test calling toI420() and comparing pixel content against I420 reference. */
+  @Test
+  @SmallTest
+ public void testToI420() {
+ final VideoFrame.I420Buffer referenceI420Buffer = createTestI420Buffer();
+ final VideoFrame.Buffer bufferToTest = createBufferToTest(referenceI420Buffer);
+
+ final VideoFrame.I420Buffer outputI420Buffer = bufferToTest.toI420();
+ bufferToTest.release();
+
+ assertEquals(VideoFrameBufferType.I420, nativeGetBufferType(outputI420Buffer));
+ assertAlmostEqualI420Buffers(referenceI420Buffer, outputI420Buffer);
+ referenceI420Buffer.release();
+ outputI420Buffer.release();
+ }
+
+  /** Pure 2x scaling with no cropping. */
+  @Test
+  @SmallTest
+  public void testScale2x() {
+    testCropAndScale(/* cropX= */ 0, /* cropY= */ 0, /* cropWidth= */ 16, /* cropHeight= */ 16,
+ /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+ }
+
+  /** Test cropping only X direction, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCropX() {
+    testCropAndScale(/* cropX= */ 8, /* cropY= */ 0, /* cropWidth= */ 8, /* cropHeight= */ 16,
+ /* scaleWidth= */ 8, /* scaleHeight= */ 16);
+ }
+
+  /** Test cropping only Y direction, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCropY() {
+    testCropAndScale(/* cropX= */ 0, /* cropY= */ 8, /* cropWidth= */ 16, /* cropHeight= */ 8,
+ /* scaleWidth= */ 16, /* scaleHeight= */ 8);
+ }
+
+  /** Test center crop, with no scaling. */
+  @Test
+  @SmallTest
+  public void testCenterCrop() {
+    testCropAndScale(/* cropX= */ 4, /* cropY= */ 4, /* cropWidth= */ 8, /* cropHeight= */ 8,
+ /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+ }
+
+  /** Test non-center crop for right bottom corner, with no scaling. */
+  @Test
+  @SmallTest
+  public void testRightBottomCornerCrop() {
+    testCropAndScale(/* cropX= */ 8, /* cropY= */ 8, /* cropWidth= */ 8, /* cropHeight= */ 8,
+ /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+ }
+
+  /** Test combined cropping and scaling. */
+  @Test
+  @SmallTest
+  public void testCropAndScale() {
+    testCropAndScale(/* cropX= */ 4, /* cropY= */ 4, /* cropWidth= */ 12, /* cropHeight= */ 12,
+ /* scaleWidth= */ 8, /* scaleHeight= */ 8);
+ }
+
+ @VideoFrameBufferType private static native int nativeGetBufferType(VideoFrame.Buffer buffer);
+
+  /** Returns a copy of the given I420Buffer, wrapped as a WrappedNativeI420Buffer. */
+ private static native VideoFrame.Buffer nativeGetNativeI420Buffer(
+ VideoFrame.I420Buffer i420Buffer);
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java
new file mode 100644
index 0000000000..8d7894c048
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/VideoTrackTest.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit tests for {@link VideoTrack}. */
+public class VideoTrackTest {
+ private PeerConnectionFactory factory;
+ private VideoSource videoSource;
+ private VideoTrack videoTrack;
+
+ @Before
+ public void setUp() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+
+ factory = PeerConnectionFactory.builder().createPeerConnectionFactory();
+ videoSource = factory.createVideoSource(/* isScreencast= */ false);
+ videoTrack = factory.createVideoTrack("video", videoSource);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingNullVideoSink() {
+ try {
+ videoTrack.addSink(/* sink= */ null);
+ fail("Should have thrown an IllegalArgumentException.");
+ } catch (IllegalArgumentException e) {
+ // Expected path.
+ }
+ }
+
+ @Test
+ @SmallTest
+ public void testRemovingNullVideoSink() {
+ videoTrack.removeSink(/* sink= */ null);
+ }
+
+ @Test
+ @SmallTest
+  public void testRemovingNonExistentVideoSink() {
+ final VideoSink videoSink = new VideoSink() {
+ @Override
+ public void onFrame(VideoFrame frame) {}
+ };
+ videoTrack.removeSink(videoSink);
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingSameVideoSinkMultipleTimes() {
+ class FrameCounter implements VideoSink {
+ private int count;
+
+ public int getCount() {
+ return count;
+ }
+
+ @Override
+ public void onFrame(VideoFrame frame) {
+ count += 1;
+ }
+ }
+ final FrameCounter frameCounter = new FrameCounter();
+
+ final VideoFrame videoFrame = new VideoFrame(
+ JavaI420Buffer.allocate(/* width= */ 32, /* height= */ 32), /* rotation= */ 0,
+ /* timestampNs= */ 0);
+
+ videoTrack.addSink(frameCounter);
+ videoTrack.addSink(frameCounter);
+ videoSource.getCapturerObserver().onFrameCaptured(videoFrame);
+
+ // Even though we called addSink() multiple times, we should only get one frame out.
+    assertEquals(1, frameCounter.getCount());
+ }
+
+ @Test
+ @SmallTest
+ public void testAddingAndRemovingVideoSink() {
+ final VideoFrame videoFrame = new VideoFrame(
+ JavaI420Buffer.allocate(/* width= */ 32, /* height= */ 32), /* rotation= */ 0,
+ /* timestampNs= */ 0);
+
+ final VideoSink failSink = new VideoSink() {
+ @Override
+ public void onFrame(VideoFrame frame) {
+ fail("onFrame() should not be called on removed sink");
+ }
+ };
+ videoTrack.addSink(failSink);
+ videoTrack.removeSink(failSink);
+ videoSource.getCapturerObserver().onFrameCaptured(videoFrame);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java
new file mode 100644
index 0000000000..b1badd5773
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.support.test.InstrumentationRegistry;
+import androidx.test.filters.SmallTest;
+import org.junit.Test;
+
+// This test is intended to run on ARM and catch LoadLibrary errors when we load the WebRTC
+// JNI library. It can't realistically set up calls, since ARM emulators are too slow, but
+// instantiating a peer connection factory isn't timing-sensitive, so we can at least do that.
+public class WebRtcJniBootTest {
+ @Test
+ @SmallTest
+  public void testJniLoadsWithoutError() {
+ PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
+ .builder(InstrumentationRegistry.getTargetContext())
+ .setNativeLibraryName(TestConstants.NATIVE_LIBRARY)
+ .createInitializationOptions());
+ PeerConnectionFactory.builder().createPeerConnectionFactory();
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java
new file mode 100644
index 0000000000..7c58e9554f
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/YuvHelperTest.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import androidx.test.filters.SmallTest;
+import java.nio.ByteBuffer;
+import org.junit.Before;
+import org.junit.Test;
+
+public class YuvHelperTest {
+ private static final int TEST_WIDTH = 3;
+ private static final int TEST_HEIGHT = 3;
+ private static final int TEST_CHROMA_WIDTH = 2;
+ private static final int TEST_CHROMA_HEIGHT = 2;
+
+ private static final int TEST_I420_STRIDE_Y = 3;
+  private static final int TEST_I420_STRIDE_U = 2;
+  private static final int TEST_I420_STRIDE_V = 4;
+
+ private static final ByteBuffer TEST_I420_Y = getTestY();
+ private static final ByteBuffer TEST_I420_U = getTestU();
+ private static final ByteBuffer TEST_I420_V = getTestV();
+
+ private static ByteBuffer getTestY() {
+ final ByteBuffer testY = ByteBuffer.allocateDirect(TEST_HEIGHT * TEST_I420_STRIDE_Y);
+ testY.put(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
+ return testY;
+ }
+
+ private static ByteBuffer getTestU() {
+    final ByteBuffer testU = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * TEST_I420_STRIDE_U);
+ testU.put(new byte[] {51, 52, 53, 54});
+ return testU;
+ }
+
+ private static ByteBuffer getTestV() {
+    final ByteBuffer testV = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * TEST_I420_STRIDE_V);
+ testV.put(new byte[] {101, 102, 103, 104, 105, 106, 107, 108});
+ return testV;
+ }
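+
+  // With the strides above, the source planes are laid out as follows; only the first
+  // TEST_CHROMA_WIDTH (2) columns of each chroma row are read:
+  //   Y (stride 3): {1, 2, 3}, {4, 5, 6}, {7, 8, 9}
+  //   U (stride 2): {51, 52}, {53, 54}
+  //   V (stride 4): {101, 102, 103, 104}, {105, 106, 107, 108}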
+
+ @Before
+ public void setUp() {
+ NativeLibrary.initialize(new NativeLibrary.DefaultLoader(), TestConstants.NATIVE_LIBRARY);
+ }
+
+ @SmallTest
+ @Test
+ public void testCopyPlane() {
+ final int dstStride = TEST_WIDTH;
+ final ByteBuffer dst = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStride);
+
+ YuvHelper.copyPlane(TEST_I420_Y, TEST_I420_STRIDE_Y, dst, dstStride, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Copy() {
+ final int dstStrideY = TEST_WIDTH;
+ final int dstStrideU = TEST_CHROMA_WIDTH;
+ final int dstStrideV = TEST_CHROMA_WIDTH;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStrideY);
+ final ByteBuffer dstU = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * dstStrideU);
+ final ByteBuffer dstV = ByteBuffer.allocateDirect(TEST_CHROMA_HEIGHT * dstStrideV);
+
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV,
+ TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dstY);
+ assertByteBufferContentEquals(new byte[] {51, 52, 53, 54}, dstU);
+ assertByteBufferContentEquals(new byte[] {101, 102, 105, 106}, dstV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420CopyTight() {
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(
+ new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 51, 52, 53, 54, 101, 102, 105, 106}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420CopyStride() {
+ final int dstStrideY = 4;
+ final int dstSliceHeightY = 4;
+ final int dstStrideU = dstStrideY / 2;
+ final int dstSliceHeightU = dstSliceHeightY / 2;
+    final int dstSize = dstStrideY * dstSliceHeightY * 3 / 2;
+
+ final ByteBuffer dst = ByteBuffer.allocateDirect(dstSize);
+    YuvHelper.I420Copy(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, dstStrideY, dstSliceHeightY,
+ dstStrideU, dstSliceHeightU);
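+
+    // The Y plane is written into a 4x4 slice (stride 4, slice height 4): each 3-pixel row is
+    // padded with a zero, a full zero row follows, and the 2x2 U and V planes are then packed
+    // tightly after it.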
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 0, 0, 0, 0, 51,
+ 52, 53, 54, 101, 102, 105, 106},
+ dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12() {
+ final int dstStrideY = TEST_WIDTH;
+ final int dstStrideUV = TEST_CHROMA_WIDTH * 2;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStrideY);
+ final ByteBuffer dstUV = ByteBuffer.allocateDirect(2 * TEST_CHROMA_HEIGHT * dstStrideUV);
+
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstUV, dstStrideUV, TEST_WIDTH,
+ TEST_HEIGHT);
+
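+    // NV12 interleaves the chroma samples U-first: U0 V0 U1 V1 ...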
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dstY);
+ assertByteBufferContentEquals(new byte[] {51, 101, 52, 102, 53, 105, 54, 106}, dstUV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12Tight() {
+ final int dstStrideY = TEST_WIDTH;
+ final int dstStrideUV = TEST_CHROMA_WIDTH * 2;
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT);
+
+ assertByteBufferContentEquals(
+ new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 51, 101, 52, 102, 53, 105, 54, 106}, dst);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420ToNV12Stride() {
+ final int dstStrideY = 4;
+ final int dstSliceHeightY = 4;
+    final int dstSize = dstStrideY * dstSliceHeightY * 3 / 2;
+
+ final ByteBuffer dst = ByteBuffer.allocateDirect(dstSize);
+    YuvHelper.I420ToNV12(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, dstStrideY, dstSliceHeightY);
+
+ assertByteBufferContentEquals(new byte[] {1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9, 0, 0, 0, 0, 0, 51,
+ 101, 52, 102, 53, 105, 54, 106},
+ dst);
+ }
+
+ private static void assertByteBufferContentEquals(byte[] expected, ByteBuffer test) {
+ assertTrue(
+ "ByteBuffer is too small. Expected " + expected.length + " but was " + test.capacity(),
+ test.capacity() >= expected.length);
+ for (int i = 0; i < expected.length; i++) {
+ assertEquals("Unexpected ByteBuffer contents at index: " + i, expected[i], test.get(i));
+ }
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Rotate90() {
+ final int dstStrideY = TEST_HEIGHT;
+ final int dstStrideU = TEST_CHROMA_HEIGHT;
+ final int dstStrideV = TEST_CHROMA_HEIGHT;
+ final ByteBuffer dstY = ByteBuffer.allocateDirect(TEST_WIDTH * dstStrideY);
+ final ByteBuffer dstU = ByteBuffer.allocateDirect(TEST_CHROMA_WIDTH * dstStrideU);
+ final ByteBuffer dstV = ByteBuffer.allocateDirect(TEST_CHROMA_WIDTH * dstStrideV);
+
+    YuvHelper.I420Rotate(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV,
+ TEST_WIDTH, TEST_HEIGHT, 90);
+
+ assertByteBufferContentEquals(new byte[] {7, 4, 1, 8, 5, 2, 9, 6, 3}, dstY);
+ assertByteBufferContentEquals(new byte[] {53, 51, 54, 52}, dstU);
+ assertByteBufferContentEquals(new byte[] {105, 101, 106, 102}, dstV);
+ }
+
+ @SmallTest
+ @Test
+ public void testI420Rotate90Tight() {
+ final ByteBuffer dst = ByteBuffer.allocateDirect(
+ TEST_WIDTH * TEST_HEIGHT + TEST_CHROMA_WIDTH * TEST_CHROMA_HEIGHT * 2);
+
+    YuvHelper.I420Rotate(TEST_I420_Y, TEST_I420_STRIDE_Y, TEST_I420_U, TEST_I420_STRIDE_U,
+        TEST_I420_V, TEST_I420_STRIDE_V, dst, TEST_WIDTH, TEST_HEIGHT, 90);
+
+ assertByteBufferContentEquals(
+ new byte[] {7, 4, 1, 8, 5, 2, 9, 6, 3, 53, 51, 54, 52, 105, 101, 106, 102}, dst);
+ }
+}
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m
new file mode 100644
index 0000000000..ecc695a09a
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m
@@ -0,0 +1,5 @@
+YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1
+FRAME
+THIS IS JUST SOME TEXT xFRAME
+THE SECOND FRAME qwerty.FRAME
+HERE IS THE THRID FRAME!
diff --git a/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc b/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc
new file mode 100644
index 0000000000..686b232f6d
--- /dev/null
+++ b/third_party/libwebrtc/sdk/android/instrumentationtests/video_frame_buffer_test.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/i420_buffer.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_frame.h"
+#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
+
+namespace webrtc {
+namespace jni {
+
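+// These functions back the native methods declared at the bottom of VideoFrameBufferTest.java
+// (nativeGetBufferType and nativeGetNativeI420Buffer); JNI_FUNCTION_DECLARATION emits the
+// exported Java_org_webrtc_* symbol that the JVM resolves against.
+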
+JNI_FUNCTION_DECLARATION(jint,
+ VideoFrameBufferTest_nativeGetBufferType,
+ JNIEnv* jni,
+ jclass,
+ jobject video_frame_buffer) {
+ const JavaParamRef<jobject> j_video_frame_buffer(video_frame_buffer);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_video_frame_buffer);
+ return static_cast<jint>(buffer->type());
+}
+
+JNI_FUNCTION_DECLARATION(jobject,
+ VideoFrameBufferTest_nativeGetNativeI420Buffer,
+ JNIEnv* jni,
+ jclass,
+ jobject i420_buffer) {
+ const JavaParamRef<jobject> j_i420_buffer(i420_buffer);
+ rtc::scoped_refptr<VideoFrameBuffer> buffer =
+ JavaToNativeFrameBuffer(jni, j_i420_buffer);
+  const I420BufferInterface* input_buffer = buffer->GetI420();
+  RTC_DCHECK(input_buffer != nullptr);
+  rtc::scoped_refptr<I420Buffer> output_buffer = I420Buffer::Copy(*input_buffer);
+  return WrapI420Buffer(jni, output_buffer).Release();
+}
+
+} // namespace jni
+} // namespace webrtc