From 36d22d82aa202bb199967e9512281e9a53db42c9 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Sun, 7 Apr 2024 21:33:14 +0200 Subject: Adding upstream version 115.7.0esr. Signed-off-by: Daniel Baumann --- .../examples/androidtests/AndroidManifest.xml | 26 + third_party/libwebrtc/examples/androidtests/OWNERS | 1 + third_party/libwebrtc/examples/androidtests/README | 14 + .../libwebrtc/examples/androidtests/ant.properties | 18 + .../libwebrtc/examples/androidtests/build.xml | 92 +++ .../examples/androidtests/project.properties | 16 + .../apprtc/test/PeerConnectionClientTest.java | 637 +++++++++++++++++++++ .../examples/androidtests/third_party/.gitignore | 3 + .../androidtests/third_party/README.webrtc | 10 + 9 files changed, 817 insertions(+) create mode 100644 third_party/libwebrtc/examples/androidtests/AndroidManifest.xml create mode 100644 third_party/libwebrtc/examples/androidtests/OWNERS create mode 100644 third_party/libwebrtc/examples/androidtests/README create mode 100644 third_party/libwebrtc/examples/androidtests/ant.properties create mode 100644 third_party/libwebrtc/examples/androidtests/build.xml create mode 100644 third_party/libwebrtc/examples/androidtests/project.properties create mode 100644 third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java create mode 100644 third_party/libwebrtc/examples/androidtests/third_party/.gitignore create mode 100644 third_party/libwebrtc/examples/androidtests/third_party/README.webrtc (limited to 'third_party/libwebrtc/examples/androidtests') diff --git a/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml b/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml new file mode 100644 index 0000000000..38ed3e3b9a --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/AndroidManifest.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + diff --git a/third_party/libwebrtc/examples/androidtests/OWNERS b/third_party/libwebrtc/examples/androidtests/OWNERS new file mode 100644 index 0000000000..cf092a316a --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/OWNERS @@ -0,0 +1 @@ +xalep@webrtc.org diff --git a/third_party/libwebrtc/examples/androidtests/README b/third_party/libwebrtc/examples/androidtests/README new file mode 100644 index 0000000000..0701b0e896 --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/README @@ -0,0 +1,14 @@ +This directory contains an example unit test for Android AppRTCMobile. + +Example of building & using the app: + +- Build Android AppRTCMobile and AppRTCMobile unit test: +cd /src +ninja -C out/Debug AppRTCMobile_test_apk + +- Install AppRTCMobile and AppRTCMobileTest: +adb install -r out/Debug/apks/AppRTCMobile.apk +adb install -r out/Debug/apks/AppRTCMobileTest.apk + +- Run unit tests: +adb shell am instrument -w org.appspot.apprtc.test/android.test.InstrumentationTestRunner diff --git a/third_party/libwebrtc/examples/androidtests/ant.properties b/third_party/libwebrtc/examples/androidtests/ant.properties new file mode 100644 index 0000000000..ec7d042885 --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/ant.properties @@ -0,0 +1,18 @@ +# This file is used to override default values used by the Ant build system. +# +# This file must be checked into Version Control Systems, as it is +# integral to the build system of your project. + +# This file is only used by the Ant script. 
+ +# You can use this to override default values such as +# 'source.dir' for the location of your java source folder and +# 'out.dir' for the location of your output folder. + +# You can also use it define how the release builds are signed by declaring +# the following properties: +# 'key.store' for the location of your keystore and +# 'key.alias' for the name of the key to use. +# The password will be asked during the build when you use the 'release' target. + +tested.project.dir=../android diff --git a/third_party/libwebrtc/examples/androidtests/build.xml b/third_party/libwebrtc/examples/androidtests/build.xml new file mode 100644 index 0000000000..95847b74c2 --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/build.xml @@ -0,0 +1,92 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/third_party/libwebrtc/examples/androidtests/project.properties b/third_party/libwebrtc/examples/androidtests/project.properties new file mode 100644 index 0000000000..a6ca533fe3 --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/project.properties @@ -0,0 +1,16 @@ +# This file is automatically generated by Android Tools. +# Do not modify this file -- YOUR CHANGES WILL BE ERASED! +# +# This file must be checked in Version Control Systems. +# +# To customize properties used by the Ant build system edit +# "ant.properties", and override values to adapt the script to your +# project structure. +# +# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home): +#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt + +# Project target. +target=android-22 + +java.compilerargs=-Xlint:all -Werror diff --git a/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java b/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java new file mode 100644 index 0000000000..051d7379bd --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/src/org/appspot/apprtc/test/PeerConnectionClientTest.java @@ -0,0 +1,637 @@ +/* + * Copyright 2014 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.appspot.apprtc.test; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import android.os.Build; +import android.support.test.InstrumentationRegistry; +import android.support.test.runner.AndroidJUnit4; +import android.util.Log; +import androidx.test.filters.SmallTest; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import org.appspot.apprtc.AppRTCClient.SignalingParameters; +import org.appspot.apprtc.PeerConnectionClient; +import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents; +import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.webrtc.Camera1Enumerator; +import org.webrtc.Camera2Enumerator; +import org.webrtc.CameraEnumerator; +import org.webrtc.EglBase; +import org.webrtc.IceCandidate; +import org.webrtc.PeerConnection; +import org.webrtc.PeerConnectionFactory; +import org.webrtc.RTCStatsReport; +import org.webrtc.SessionDescription; +import org.webrtc.VideoCapturer; +import org.webrtc.VideoFrame; +import org.webrtc.VideoSink; + +@RunWith(AndroidJUnit4.class) +public class PeerConnectionClientTest implements PeerConnectionEvents { + private static final String TAG = "RTCClientTest"; + private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000; + private static final int WAIT_TIMEOUT = 7000; + private static final int CAMERA_SWITCH_ATTEMPTS = 3; + private static final int VIDEO_RESTART_ATTEMPTS = 3; + private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3; + private static final int VIDEO_RESTART_TIMEOUT = 500; + private static final int EXPECTED_VIDEO_FRAMES = 10; + private static final String VIDEO_CODEC_VP8 = "VP8"; + private static final String VIDEO_CODEC_VP9 = "VP9"; + private static final String VIDEO_CODEC_H264 = "H264"; + private static final int AUDIO_RUN_TIMEOUT = 1000; + private static final String LOCAL_RENDERER_NAME = "Local renderer"; + private static final String REMOTE_RENDERER_NAME = "Remote renderer"; + + private static final int MAX_VIDEO_FPS = 30; + private static final int WIDTH_VGA = 640; + private static final int HEIGHT_VGA = 480; + private static final int WIDTH_QVGA = 320; + private static final int HEIGHT_QVGA = 240; + + // The peer connection client is assumed to be thread safe in itself; the + // reference is written by the test thread and read by worker threads. + private volatile PeerConnectionClient pcClient; + private volatile boolean loopback; + + // These are protected by their respective event objects. + private ExecutorService signalingExecutor; + private boolean isClosed; + private boolean isIceConnected; + private SessionDescription localDesc; + private List iceCandidates = new ArrayList<>(); + private final Object localDescEvent = new Object(); + private final Object iceCandidateEvent = new Object(); + private final Object iceConnectedEvent = new Object(); + private final Object closeEvent = new Object(); + + // Mock VideoSink implementation. + private static class MockSink implements VideoSink { + // These are protected by 'this' since we gets called from worker threads. + private String rendererName; + private boolean renderFrameCalled; + + // Thread-safe in itself. 
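+    // (await() and countDown() need no extra locking; only reset() replaces the latch.)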
+ private CountDownLatch doneRendering; + + public MockSink(int expectedFrames, String rendererName) { + this.rendererName = rendererName; + reset(expectedFrames); + } + + // Resets render to wait for new amount of video frames. + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void reset(int expectedFrames) { + renderFrameCalled = false; + doneRendering = new CountDownLatch(expectedFrames); + } + + @Override + // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. + @SuppressWarnings("NoSynchronizedMethodCheck") + public synchronized void onFrame(VideoFrame frame) { + if (!renderFrameCalled) { + if (rendererName != null) { + Log.d(TAG, + rendererName + " render frame: " + frame.getRotatedWidth() + " x " + + frame.getRotatedHeight()); + } else { + Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight()); + } + } + renderFrameCalled = true; + doneRendering.countDown(); + } + + // This method shouldn't hold any locks or touch member variables since it + // blocks. + public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException { + doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS); + return (doneRendering.getCount() <= 0); + } + } + + // Peer connection events implementation. + @Override + public void onLocalDescription(SessionDescription desc) { + Log.d(TAG, "Local description type: " + desc.type); + synchronized (localDescEvent) { + localDesc = desc; + localDescEvent.notifyAll(); + } + } + + @Override + public void onIceCandidate(final IceCandidate candidate) { + synchronized (iceCandidateEvent) { + Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString()); + if (loopback) { + // Loopback local ICE candidate in a separate thread to avoid adding + // remote ICE candidate in a local ICE candidate callback. + signalingExecutor.execute(new Runnable() { + @Override + public void run() { + pcClient.addRemoteIceCandidate(candidate); + } + }); + } + iceCandidates.add(candidate); + iceCandidateEvent.notifyAll(); + } + } + + @Override + public void onIceCandidatesRemoved(final IceCandidate[] candidates) { + // TODO(honghaiz): Add this for tests. + } + + @Override + public void onIceConnected() { + Log.d(TAG, "ICE Connected"); + synchronized (iceConnectedEvent) { + isIceConnected = true; + iceConnectedEvent.notifyAll(); + } + } + + @Override + public void onIceDisconnected() { + Log.d(TAG, "ICE Disconnected"); + synchronized (iceConnectedEvent) { + isIceConnected = false; + iceConnectedEvent.notifyAll(); + } + } + + @Override + public void onConnected() { + Log.d(TAG, "DTLS Connected"); + } + + @Override + public void onDisconnected() { + Log.d(TAG, "DTLS Disconnected"); + } + + @Override + public void onPeerConnectionClosed() { + Log.d(TAG, "PeerConnection closed"); + synchronized (closeEvent) { + isClosed = true; + closeEvent.notifyAll(); + } + } + + @Override + public void onPeerConnectionError(String description) { + fail("PC Error: " + description); + } + + @Override + public void onPeerConnectionStatsReady(final RTCStatsReport report) {} + + // Helper wait functions. 
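+  // Each helper blocks on its event object until the watched condition is signalled or the
+  // deadline computed from timeoutMs passes.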
+ private boolean waitForLocalDescription(int timeoutMs) throws InterruptedException { + synchronized (localDescEvent) { + final long endTimeMs = System.currentTimeMillis() + timeoutMs; + while (localDesc == null) { + final long waitTimeMs = endTimeMs - System.currentTimeMillis(); + if (waitTimeMs < 0) { + return false; + } + localDescEvent.wait(waitTimeMs); + } + return true; + } + } + + private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException { + synchronized (iceCandidateEvent) { + final long endTimeMs = System.currentTimeMillis() + timeoutMs; + while (iceCandidates.size() == 0) { + final long waitTimeMs = endTimeMs - System.currentTimeMillis(); + if (waitTimeMs < 0) { + return false; + } + iceCandidateEvent.wait(timeoutMs); + } + return true; + } + } + + private boolean waitForIceConnected(int timeoutMs) throws InterruptedException { + synchronized (iceConnectedEvent) { + final long endTimeMs = System.currentTimeMillis() + timeoutMs; + while (!isIceConnected) { + final long waitTimeMs = endTimeMs - System.currentTimeMillis(); + if (waitTimeMs < 0) { + Log.e(TAG, "ICE connection failure"); + return false; + } + iceConnectedEvent.wait(timeoutMs); + } + return true; + } + } + + private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException { + synchronized (closeEvent) { + final long endTimeMs = System.currentTimeMillis() + timeoutMs; + while (!isClosed) { + final long waitTimeMs = endTimeMs - System.currentTimeMillis(); + if (waitTimeMs < 0) { + return false; + } + closeEvent.wait(timeoutMs); + } + return true; + } + } + + PeerConnectionClient createPeerConnectionClient(MockSink localRenderer, MockSink remoteRenderer, + PeerConnectionParameters peerConnectionParameters, VideoCapturer videoCapturer) { + List iceServers = new ArrayList<>(); + SignalingParameters signalingParameters = + new SignalingParameters(iceServers, true, // iceServers, initiator. + null, null, null, // clientId, wssUrl, wssPostUrl. + null, null); // offerSdp, iceCandidates. + + final EglBase eglBase = EglBase.create(); + PeerConnectionClient client = + new PeerConnectionClient(InstrumentationRegistry.getTargetContext(), eglBase, + peerConnectionParameters, this /* PeerConnectionEvents */); + PeerConnectionFactory.Options options = new PeerConnectionFactory.Options(); + options.networkIgnoreMask = 0; + options.disableNetworkMonitor = true; + client.createPeerConnectionFactory(options); + client.createPeerConnection(localRenderer, remoteRenderer, videoCapturer, signalingParameters); + client.createOffer(); + return client; + } + + private PeerConnectionParameters createParametersForAudioCall() { + return new PeerConnectionParameters(false, /* videoCallEnabled */ + true, /* loopback */ + false, /* tracing */ + // Video codec parameters. + 0, /* videoWidth */ + 0, /* videoHeight */ + 0, /* videoFps */ + 0, /* videoStartBitrate */ + "", /* videoCodec */ + true, /* videoCodecHwAcceleration */ + false, /* videoFlexfecEnabled */ + // Audio codec parameters. 
+ 0, /* audioStartBitrate */ + "OPUS", /* audioCodec */ + false, /* noAudioProcessing */ + false, /* aecDump */ + false, /* saveInputAudioToFile */ + false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, + false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */, + null /* dataChannelParameters */); + } + + private VideoCapturer createCameraCapturer(boolean captureToTexture) { + final boolean useCamera2 = captureToTexture + && Camera2Enumerator.isSupported(InstrumentationRegistry.getTargetContext()); + + CameraEnumerator enumerator; + if (useCamera2) { + enumerator = new Camera2Enumerator(InstrumentationRegistry.getTargetContext()); + } else { + enumerator = new Camera1Enumerator(captureToTexture); + } + String deviceName = enumerator.getDeviceNames()[0]; + return enumerator.createCapturer(deviceName, null); + } + + private PeerConnectionParameters createParametersForVideoCall(String videoCodec) { + return new PeerConnectionParameters(true, /* videoCallEnabled */ + true, /* loopback */ + false, /* tracing */ + // Video codec parameters. + 0, /* videoWidth */ + 0, /* videoHeight */ + 0, /* videoFps */ + 0, /* videoStartBitrate */ + videoCodec, /* videoCodec */ + true, /* videoCodecHwAcceleration */ + false, /* videoFlexfecEnabled */ + // Audio codec parameters. + 0, /* audioStartBitrate */ + "OPUS", /* audioCodec */ + false, /* noAudioProcessing */ + false, /* aecDump */ + false, /* saveInputAudioToFile */ + false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */, + false /* disableBuiltInNS */, false /* disableWebRtcAGC */, false /* enableRtcEventLog */, + null /* dataChannelParameters */); + } + + @Before + public void setUp() { + signalingExecutor = Executors.newSingleThreadExecutor(); + } + + @After + public void tearDown() { + signalingExecutor.shutdown(); + } + + @Test + @SmallTest + public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException { + Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally"); + MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); + pcClient = createPeerConnectionClient(localRenderer, + new MockSink(/* expectedFrames= */ 0, /* rendererName= */ null), + createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */)); + + // Wait for local description and ice candidates set events. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT)); + + // Check that local video frames were rendered. 
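+    // (the local MockSink must count EXPECTED_VIDEO_FRAMES frames within WAIT_TIMEOUT)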
+ assertTrue( + "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + + pcClient.close(); + assertTrue( + "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT)); + Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done."); + } + + private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer, + boolean decodeToTexture) throws InterruptedException { + loopback = true; + MockSink localRenderer = null; + MockSink remoteRenderer = null; + if (parameters.videoCallEnabled) { + Log.d(TAG, "testLoopback for video " + parameters.videoCodec); + localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); + remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); + } else { + Log.d(TAG, "testLoopback for audio."); + } + pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer); + + // Wait for local description, change type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); + + // Wait for ICE connection. + assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); + + if (parameters.videoCallEnabled) { + // Check that local and remote video frames were rendered. + assertTrue("Local video frames were not rendered.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + } else { + // For audio just sleep for 1 sec. + // TODO(glaznev): check how we can detect that remote audio was rendered. 
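+      // One possible check (not implemented here): request periodic stats from pcClient and
+      // look for non-zero audio output levels in onPeerConnectionStatsReady().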
+ Thread.sleep(AUDIO_RUN_TIMEOUT); + } + + pcClient.close(); + assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); + Log.d(TAG, "testLoopback done."); + } + + @Test + @SmallTest + public void testLoopbackAudio() throws InterruptedException { + doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackVp8() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackVp9() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), + createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackH264() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), + createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackVp8DecodeToTexture() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackVp9DecodeToTexture() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9), + createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackH264DecodeToTexture() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), + createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackVp8CaptureToTexture() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); + } + + @Test + @SmallTest + public void testLoopbackH264CaptureToTexture() throws InterruptedException { + doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264), + createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */); + } + + // Checks if default front camera can be switched to back camera and then + // again to front camera. + @Test + @SmallTest + public void testCameraSwitch() throws InterruptedException { + Log.d(TAG, "testCameraSwitch"); + loopback = true; + + MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); + MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); + + pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, + createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */)); + + // Wait for local description, set type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); + + // Wait for ICE connection. + assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); + + // Check that local and remote video frames were rendered. 
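+    // (in loopback mode the remote sink receives the captured stream back after a full
+    // encode/decode round trip)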
+ assertTrue("Local video frames were not rendered before camera switch.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered before camera switch.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + + for (int i = 0; i < CAMERA_SWITCH_ATTEMPTS; i++) { + // Try to switch camera + pcClient.switchCamera(); + + // Reset video renders and check that local and remote video frames + // were rendered after camera switch. + localRenderer.reset(EXPECTED_VIDEO_FRAMES); + remoteRenderer.reset(EXPECTED_VIDEO_FRAMES); + assertTrue("Local video frames were not rendered after camera switch.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered after camera switch.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + } + pcClient.close(); + assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); + Log.d(TAG, "testCameraSwitch done."); + } + + // Checks if video source can be restarted - simulate app goes to + // background and back to foreground. + @Test + @SmallTest + public void testVideoSourceRestart() throws InterruptedException { + Log.d(TAG, "testVideoSourceRestart"); + loopback = true; + + MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); + MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); + + pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, + createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */)); + + // Wait for local description, set type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); + + // Wait for ICE connection. + assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); + + // Check that local and remote video frames were rendered. + assertTrue("Local video frames were not rendered before video restart.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered before video restart.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + + // Stop and then start video source a few times. + for (int i = 0; i < VIDEO_RESTART_ATTEMPTS; i++) { + pcClient.stopVideoSource(); + Thread.sleep(VIDEO_RESTART_TIMEOUT); + pcClient.startVideoSource(); + + // Reset video renders and check that local and remote video frames + // were rendered after video restart. + localRenderer.reset(EXPECTED_VIDEO_FRAMES); + remoteRenderer.reset(EXPECTED_VIDEO_FRAMES); + assertTrue("Local video frames were not rendered after video restart.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered after video restart.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + } + pcClient.close(); + assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); + Log.d(TAG, "testVideoSourceRestart done."); + } + + // Checks if capture format can be changed on fly and decoder can be reset properly. 
+ @Test + @SmallTest + public void testCaptureFormatChange() throws InterruptedException { + Log.d(TAG, "testCaptureFormatChange"); + loopback = true; + + MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME); + MockSink remoteRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME); + + pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, + createParametersForVideoCall(VIDEO_CODEC_VP8), + createCameraCapturer(false /* captureToTexture */)); + + // Wait for local description, set type to answer and set as remote description. + assertTrue("Local description was not set.", waitForLocalDescription(WAIT_TIMEOUT)); + SessionDescription remoteDescription = new SessionDescription( + SessionDescription.Type.fromCanonicalForm("answer"), localDesc.description); + pcClient.setRemoteDescription(remoteDescription); + + // Wait for ICE connection. + assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT)); + + // Check that local and remote video frames were rendered. + assertTrue("Local video frames were not rendered before camera resolution change.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered before camera resolution change.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + + // Change capture output format a few times. + for (int i = 0; i < 2 * CAPTURE_FORMAT_CHANGE_ATTEMPTS; i++) { + if (i % 2 == 0) { + pcClient.changeCaptureFormat(WIDTH_VGA, HEIGHT_VGA, MAX_VIDEO_FPS); + } else { + pcClient.changeCaptureFormat(WIDTH_QVGA, HEIGHT_QVGA, MAX_VIDEO_FPS); + } + + // Reset video renders and check that local and remote video frames + // were rendered after capture format change. + localRenderer.reset(EXPECTED_VIDEO_FRAMES); + remoteRenderer.reset(EXPECTED_VIDEO_FRAMES); + assertTrue("Local video frames were not rendered after capture format change.", + localRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + assertTrue("Remote video frames were not rendered after capture format change.", + remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT)); + } + + pcClient.close(); + assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT)); + Log.d(TAG, "testCaptureFormatChange done."); + } +} diff --git a/third_party/libwebrtc/examples/androidtests/third_party/.gitignore b/third_party/libwebrtc/examples/androidtests/third_party/.gitignore new file mode 100644 index 0000000000..52acefb2ec --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/third_party/.gitignore @@ -0,0 +1,3 @@ +# This file is needed for projects that has this directory as a separate Git +# mirror in DEPS. Without it, a lot is wiped and re-downloaded for each sync. +/gradle diff --git a/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc b/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc new file mode 100644 index 0000000000..a6ea884923 --- /dev/null +++ b/third_party/libwebrtc/examples/androidtests/third_party/README.webrtc @@ -0,0 +1,10 @@ +The third_party directory contains sources from other projects. + +Code in third_party must document the license under which the source is being +used. If the source itself does not include a license header or file, create +an entry in this file that refers to reliable documentation of the project's +license terms on the web (and add a note pointing here in the README file in +that directory). + + + -- cgit v1.2.3
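A single test from PeerConnectionClientTest can also be selected at run time; a sketch using the
instrumentation runner named in the README above (the package and runner must match the installed
AppRTCMobileTest build):

adb shell am instrument -w -e class org.appspot.apprtc.test.PeerConnectionClientTest#testLoopbackAudio org.appspot.apprtc.test/android.test.InstrumentationTestRunner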