summaryrefslogtreecommitdiffstats
path: root/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video
diff options
context:
space:
mode:
Diffstat (limited to 'mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video')
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/AvcConfig.java97
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/ColorInfo.java150
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DolbyVisionConfig.java64
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DummySurface.java228
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/HevcConfig.java91
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java1873
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java975
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderException.java40
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java57
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java30
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java185
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java27
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderRenderer.java241
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java40
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java361
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoListener.java58
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoRendererEventListener.java198
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/package-info.java19
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionListener.java32
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java134
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java124
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/Projection.java236
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java238
-rw-r--r--mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/package-info.java19
24 files changed, 5517 insertions, 0 deletions
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/AvcConfig.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/AvcConfig.java
new file mode 100644
index 0000000000..2026a27ff7
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/AvcConfig.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ParserException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.CodecSpecificDataUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NalUnitUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NalUnitUtil.SpsData;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
+import java.util.ArrayList;
+import java.util.List;
+
/**
 * AVC (H.264) configuration data, parsed from an AVCDecoderConfigurationRecord as defined in
 * ISO/IEC 14496-15.
 */
public final class AvcConfig {

  /** Codec initialization data: the SPS NAL units followed by the PPS NAL units. */
  public final List<byte[]> initializationData;
  /** The number of bytes used to encode the length of each NAL unit in the sample data. */
  public final int nalUnitLengthFieldLength;
  /** The frame width in pixels, or {@link Format#NO_VALUE} if no SPS was present. */
  public final int width;
  /** The frame height in pixels, or {@link Format#NO_VALUE} if no SPS was present. */
  public final int height;
  /** The pixel aspect ratio parsed from the SPS, or 1 if no SPS was present. */
  public final float pixelWidthAspectRatio;

  /**
   * Parses AVC configuration data.
   *
   * @param data A {@link ParsableByteArray}, whose position is set to the start of the AVC
   *     configuration data to parse.
   * @return A parsed representation of the AVC configuration data.
   * @throws ParserException If an error occurred parsing the data.
   */
  public static AvcConfig parse(ParsableByteArray data) throws ParserException {
    try {
      data.skipBytes(4); // Skip to the AVCDecoderConfigurationRecord (defined in 14496-15)
      int nalUnitLengthFieldLength = (data.readUnsignedByte() & 0x3) + 1;
      if (nalUnitLengthFieldLength == 3) {
        // lengthSizeMinusOne == 2 (a 3 byte length field) is disallowed by ISO/IEC 14496-15.
        throw new IllegalStateException();
      }
      List<byte[]> initializationData = new ArrayList<>();
      int numSequenceParameterSets = data.readUnsignedByte() & 0x1F;
      for (int j = 0; j < numSequenceParameterSets; j++) {
        initializationData.add(buildNalUnitForChild(data));
      }
      int numPictureParameterSets = data.readUnsignedByte();
      for (int j = 0; j < numPictureParameterSets; j++) {
        initializationData.add(buildNalUnitForChild(data));
      }

      int width = Format.NO_VALUE;
      int height = Format.NO_VALUE;
      float pixelWidthAspectRatio = 1;
      if (numSequenceParameterSets > 0) {
        // Parse the first SPS to obtain the video dimensions and pixel aspect ratio.
        // NOTE(review): the parse offset is nalUnitLengthFieldLength, which is presumably meant to
        // skip the start code that buildNalUnitForChild prepends; this only lines up when the
        // length field is the same size as the start code (4 bytes) — TODO confirm against
        // CodecSpecificDataUtil.buildNalUnit / NalUnitUtil.parseSpsNalUnit.
        byte[] sps = initializationData.get(0);
        SpsData spsData = NalUnitUtil.parseSpsNalUnit(initializationData.get(0),
            nalUnitLengthFieldLength, sps.length);
        width = spsData.width;
        height = spsData.height;
        pixelWidthAspectRatio = spsData.pixelWidthAspectRatio;
      }
      return new AvcConfig(initializationData, nalUnitLengthFieldLength, width, height,
          pixelWidthAspectRatio);
    } catch (ArrayIndexOutOfBoundsException e) {
      // Truncated or malformed configuration data surfaces here from the reads above.
      throw new ParserException("Error parsing AVC config", e);
    }
  }

  private AvcConfig(List<byte[]> initializationData, int nalUnitLengthFieldLength,
      int width, int height, float pixelWidthAspectRatio) {
    this.initializationData = initializationData;
    this.nalUnitLengthFieldLength = nalUnitLengthFieldLength;
    this.width = width;
    this.height = height;
    this.pixelWidthAspectRatio = pixelWidthAspectRatio;
  }

  /**
   * Reads one length-prefixed NAL unit from {@code data}, advancing its position past the unit,
   * and returns the result of {@link CodecSpecificDataUtil#buildNalUnit} for the unit's bytes.
   */
  private static byte[] buildNalUnitForChild(ParsableByteArray data) {
    int length = data.readUnsignedShort();
    int offset = data.getPosition();
    data.skipBytes(length);
    return CodecSpecificDataUtil.buildNalUnit(data.data, offset, length);
  }

}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/ColorInfo.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/ColorInfo.java
new file mode 100644
index 0000000000..7eed4e3eaf
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/ColorInfo.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+import java.util.Arrays;
+
/**
 * Stores color info: color space, color range, transfer characteristics and optional HDR static
 * metadata. Immutable and {@link Parcelable}.
 */
public final class ColorInfo implements Parcelable {

  /**
   * The color space of the video. Valid values are {@link C#COLOR_SPACE_BT601}, {@link
   * C#COLOR_SPACE_BT709}, {@link C#COLOR_SPACE_BT2020} or {@link Format#NO_VALUE} if unknown.
   */
  @C.ColorSpace
  public final int colorSpace;

  /**
   * The color range of the video. Valid values are {@link C#COLOR_RANGE_LIMITED}, {@link
   * C#COLOR_RANGE_FULL} or {@link Format#NO_VALUE} if unknown.
   */
  @C.ColorRange
  public final int colorRange;

  /**
   * The color transfer characteristics of the video. Valid values are {@link
   * C#COLOR_TRANSFER_HLG}, {@link C#COLOR_TRANSFER_ST2084}, {@link C#COLOR_TRANSFER_SDR} or {@link
   * Format#NO_VALUE} if unknown.
   */
  @C.ColorTransfer
  public final int colorTransfer;

  /** HdrStaticInfo as defined in CTA-861.3, or null if none specified. */
  @Nullable public final byte[] hdrStaticInfo;

  // Lazily initialized hashcode. 0 doubles as the "not yet computed" sentinel.
  private int hashCode;

  /**
   * Constructs the ColorInfo.
   *
   * @param colorSpace The color space of the video.
   * @param colorRange The color range of the video.
   * @param colorTransfer The color transfer characteristics of the video.
   * @param hdrStaticInfo HdrStaticInfo as defined in CTA-861.3, or null if none specified.
   */
  public ColorInfo(
      @C.ColorSpace int colorSpace,
      @C.ColorRange int colorRange,
      @C.ColorTransfer int colorTransfer,
      @Nullable byte[] hdrStaticInfo) {
    this.colorSpace = colorSpace;
    this.colorRange = colorRange;
    this.colorTransfer = colorTransfer;
    this.hdrStaticInfo = hdrStaticInfo;
  }

  // Restores the fields in the order written by writeToParcel.
  @SuppressWarnings("ResourceType")
  /* package */ ColorInfo(Parcel in) {
    colorSpace = in.readInt();
    colorRange = in.readInt();
    colorTransfer = in.readInt();
    boolean hasHdrStaticInfo = Util.readBoolean(in);
    hdrStaticInfo = hasHdrStaticInfo ? in.createByteArray() : null;
  }

  // Object overrides.
  @Override
  public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    ColorInfo other = (ColorInfo) obj;
    return colorSpace == other.colorSpace
        && colorRange == other.colorRange
        && colorTransfer == other.colorTransfer
        && Arrays.equals(hdrStaticInfo, other.hdrStaticInfo);
  }

  @Override
  public String toString() {
    return "ColorInfo(" + colorSpace + ", " + colorRange + ", " + colorTransfer
        + ", " + (hdrStaticInfo != null) + ")";
  }

  @Override
  public int hashCode() {
    if (hashCode == 0) {
      int result = 17;
      result = 31 * result + colorSpace;
      result = 31 * result + colorRange;
      result = 31 * result + colorTransfer;
      result = 31 * result + Arrays.hashCode(hdrStaticInfo);
      hashCode = result;
    }
    return hashCode;
  }

  // Parcelable implementation.
  @Override
  public int describeContents() {
    return 0;
  }

  @Override
  public void writeToParcel(Parcel dest, int flags) {
    dest.writeInt(colorSpace);
    dest.writeInt(colorRange);
    dest.writeInt(colorTransfer);
    // Write a presence flag so the Parcel constructor knows whether to read the byte array.
    Util.writeBoolean(dest, hdrStaticInfo != null);
    if (hdrStaticInfo != null) {
      dest.writeByteArray(hdrStaticInfo);
    }
  }

  public static final Parcelable.Creator<ColorInfo> CREATOR =
      new Parcelable.Creator<ColorInfo>() {
        @Override
        public ColorInfo createFromParcel(Parcel in) {
          return new ColorInfo(in);
        }

        @Override
        public ColorInfo[] newArray(int size) {
          return new ColorInfo[size];
        }
      };
}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DolbyVisionConfig.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DolbyVisionConfig.java
new file mode 100644
index 0000000000..bfc1f814d2
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DolbyVisionConfig.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
+
+/** Dolby Vision configuration data. */
+public final class DolbyVisionConfig {
+
+ /**
+ * Parses Dolby Vision configuration data.
+ *
+ * @param data A {@link ParsableByteArray}, whose position is set to the start of the Dolby Vision
+ * configuration data to parse.
+ * @return The {@link DolbyVisionConfig} corresponding to the configuration, or {@code null} if
+ * the configuration isn't supported.
+ */
+ @Nullable
+ public static DolbyVisionConfig parse(ParsableByteArray data) {
+ data.skipBytes(2); // dv_version_major, dv_version_minor
+ int profileData = data.readUnsignedByte();
+ int dvProfile = (profileData >> 1);
+ int dvLevel = ((profileData & 0x1) << 5) | ((data.readUnsignedByte() >> 3) & 0x1F);
+ String codecsPrefix;
+ if (dvProfile == 4 || dvProfile == 5 || dvProfile == 7) {
+ codecsPrefix = "dvhe";
+ } else if (dvProfile == 8) {
+ codecsPrefix = "hev1";
+ } else if (dvProfile == 9) {
+ codecsPrefix = "avc3";
+ } else {
+ return null;
+ }
+ String codecs = codecsPrefix + ".0" + dvProfile + ".0" + dvLevel;
+ return new DolbyVisionConfig(dvProfile, dvLevel, codecs);
+ }
+
+ /** The profile number. */
+ public final int profile;
+ /** The level number. */
+ public final int level;
+ /** The RFC 6381 codecs string. */
+ public final String codecs;
+
+ private DolbyVisionConfig(int profile, int level, String codecs) {
+ this.profile = profile;
+ this.level = level;
+ this.codecs = codecs;
+ }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DummySurface.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DummySurface.java
new file mode 100644
index 0000000000..abfb8b0952
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/DummySurface.java
@@ -0,0 +1,228 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_NONE;
+import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_PROTECTED_PBUFFER;
+import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.EGLSurfaceTexture.SECURE_MODE_SURFACELESS_CONTEXT;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.os.Handler;
+import android.os.Handler.Callback;
+import android.os.HandlerThread;
+import android.os.Message;
+import android.view.Surface;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.EGLSurfaceTexture;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.EGLSurfaceTexture.SecureMode;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.GlUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
+
/**
 * A dummy {@link Surface}, backed by a {@link SurfaceTexture} owned by a dedicated
 * {@link HandlerThread} that hosts the EGL state.
 */
@TargetApi(17)
public final class DummySurface extends Surface {

  private static final String TAG = "DummySurface";

  /**
   * Whether the surface is secure.
   */
  public final boolean secure;

  // Cached result of getSecureMode(Context); computed at most once, under the class lock taken by
  // isSecureSupported.
  private static @SecureMode int secureMode;
  private static boolean secureModeInitialized;

  // The thread owning the backing EGLSurfaceTexture.
  private final DummySurfaceThread thread;
  // Whether the thread has been released. Guarded by synchronizing on |thread|.
  private boolean threadReleased;

  /**
   * Returns whether the device supports secure dummy surfaces.
   *
   * @param context Any {@link Context}.
   * @return Whether the device supports secure dummy surfaces.
   */
  public static synchronized boolean isSecureSupported(Context context) {
    if (!secureModeInitialized) {
      secureMode = getSecureMode(context);
      secureModeInitialized = true;
    }
    return secureMode != SECURE_MODE_NONE;
  }

  /**
   * Returns a newly created dummy surface. The surface must be released by calling {@link #release}
   * when it's no longer required.
   * <p>
   * Must only be called if {@link Util#SDK_INT} is 17 or higher.
   *
   * @param context Any {@link Context}.
   * @param secure Whether a secure surface is required. Must only be requested if
   *     {@link #isSecureSupported(Context)} returns {@code true}.
   * @return The newly created dummy surface.
   * @throws IllegalStateException If a secure surface is requested on a device for which
   *     {@link #isSecureSupported(Context)} returns {@code false}.
   */
  public static DummySurface newInstanceV17(Context context, boolean secure) {
    assertApiLevel17OrHigher();
    Assertions.checkState(!secure || isSecureSupported(context));
    DummySurfaceThread thread = new DummySurfaceThread();
    // init blocks until the surface has been created on the new thread (or creation failed).
    return thread.init(secure ? secureMode : SECURE_MODE_NONE);
  }

  private DummySurface(DummySurfaceThread thread, SurfaceTexture surfaceTexture, boolean secure) {
    super(surfaceTexture);
    this.thread = thread;
    this.secure = secure;
  }

  @Override
  public void release() {
    super.release();
    // The Surface may be released multiple times (explicitly and by Surface.finalize()). The
    // implementation of super.release() has its own deduplication logic. Below we need to
    // deduplicate ourselves. Synchronization is required as we don't control the thread on which
    // Surface.finalize() is called.
    synchronized (thread) {
      if (!threadReleased) {
        thread.release();
        threadReleased = true;
      }
    }
  }

  private static void assertApiLevel17OrHigher() {
    if (Util.SDK_INT < 17) {
      throw new UnsupportedOperationException("Unsupported prior to API level 17");
    }
  }

  @SecureMode
  private static int getSecureMode(Context context) {
    if (GlUtil.isProtectedContentExtensionSupported(context)) {
      if (GlUtil.isSurfacelessContextExtensionSupported()) {
        return SECURE_MODE_SURFACELESS_CONTEXT;
      } else {
        // If we can't use surfaceless contexts, we use a protected 1 * 1 pixel buffer surface.
        // This may require support for EXT_protected_surface, but in practice it works on some
        // devices that don't have that extension. See also
        // https://github.com/google/ExoPlayer/issues/3558.
        return SECURE_MODE_PROTECTED_PBUFFER;
      }
    } else {
      return SECURE_MODE_NONE;
    }
  }

  /**
   * Thread that owns the {@link EGLSurfaceTexture}. Creation and release of the EGL state happen
   * on this thread via {@link #MSG_INIT} and {@link #MSG_RELEASE} messages.
   */
  private static class DummySurfaceThread extends HandlerThread implements Callback {

    private static final int MSG_INIT = 1;
    private static final int MSG_RELEASE = 2;

    private @MonotonicNonNull EGLSurfaceTexture eglSurfaceTexture;
    private @MonotonicNonNull Handler handler;
    // Outcome of MSG_INIT, published to the caller of init() under |this| via wait/notify.
    @Nullable private Error initError;
    @Nullable private RuntimeException initException;
    @Nullable private DummySurface surface;

    public DummySurfaceThread() {
      super("dummySurface");
    }

    /**
     * Starts the thread, triggers surface creation on it and blocks until the surface is
     * available, rethrowing any {@link RuntimeException} or {@link Error} that occurred during
     * creation.
     */
    public DummySurface init(@SecureMode int secureMode) {
      start();
      handler = new Handler(getLooper(), /* callback= */ this);
      eglSurfaceTexture = new EGLSurfaceTexture(handler);
      boolean wasInterrupted = false;
      synchronized (this) {
        handler.obtainMessage(MSG_INIT, secureMode, 0).sendToTarget();
        // Wait for handleMessage(MSG_INIT) to publish either a surface or a failure.
        while (surface == null && initException == null && initError == null) {
          try {
            wait();
          } catch (InterruptedException e) {
            wasInterrupted = true;
          }
        }
      }
      if (wasInterrupted) {
        // Restore the interrupted status.
        Thread.currentThread().interrupt();
      }
      if (initException != null) {
        throw initException;
      } else if (initError != null) {
        throw initError;
      } else {
        return Assertions.checkNotNull(surface);
      }
    }

    /** Asynchronously releases the EGL state and quits the thread. */
    public void release() {
      Assertions.checkNotNull(handler);
      handler.sendEmptyMessage(MSG_RELEASE);
    }

    @Override
    public boolean handleMessage(Message msg) {
      switch (msg.what) {
        case MSG_INIT:
          try {
            initInternal(/* secureMode= */ msg.arg1);
          } catch (RuntimeException e) {
            Log.e(TAG, "Failed to initialize dummy surface", e);
            initException = e;
          } catch (Error e) {
            Log.e(TAG, "Failed to initialize dummy surface", e);
            initError = e;
          } finally {
            // Wake the thread blocked in init(), whether creation succeeded or failed.
            synchronized (this) {
              notify();
            }
          }
          return true;
        case MSG_RELEASE:
          try {
            releaseInternal();
          } catch (Throwable e) {
            Log.e(TAG, "Failed to release dummy surface", e);
          } finally {
            // Always stop the looper, even if EGL release failed.
            quit();
          }
          return true;
        default:
          return true;
      }
    }

    private void initInternal(@SecureMode int secureMode) {
      Assertions.checkNotNull(eglSurfaceTexture);
      eglSurfaceTexture.init(secureMode);
      this.surface =
          new DummySurface(
              this, eglSurfaceTexture.getSurfaceTexture(), secureMode != SECURE_MODE_NONE);
    }

    private void releaseInternal() {
      Assertions.checkNotNull(eglSurfaceTexture);
      eglSurfaceTexture.release();
    }

  }

}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/HevcConfig.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/HevcConfig.java
new file mode 100644
index 0000000000..844712146a
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/HevcConfig.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ParserException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NalUnitUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
+import java.util.Collections;
+import java.util.List;
+
/**
 * HEVC (H.265) configuration data, parsed from an HEVCDecoderConfigurationRecord as defined in
 * ISO/IEC 14496-15.
 */
public final class HevcConfig {

  /**
   * Codec initialization data: a single buffer containing all VPS/SPS/PPS NAL units, each
   * preceded by a start code, or null if the record contained no NAL units.
   */
  @Nullable public final List<byte[]> initializationData;
  /** The number of bytes used to encode the length of each NAL unit in the sample data. */
  public final int nalUnitLengthFieldLength;

  /**
   * Parses HEVC configuration data.
   *
   * @param data A {@link ParsableByteArray}, whose position is set to the start of the HEVC
   *     configuration data to parse.
   * @return A parsed representation of the HEVC configuration data.
   * @throws ParserException If an error occurred parsing the data.
   */
  public static HevcConfig parse(ParsableByteArray data) throws ParserException {
    try {
      data.skipBytes(21); // Skip to the NAL unit length size field.
      int lengthSizeMinusOne = data.readUnsignedByte() & 0x03;

      // First pass: calculate the combined size of all VPS/SPS/PPS bitstreams.
      int numberOfArrays = data.readUnsignedByte();
      int csdLength = 0;
      int csdStartPosition = data.getPosition();
      for (int i = 0; i < numberOfArrays; i++) {
        data.skipBytes(1); // array_completeness (1), reserved (1), nal_unit_type (6)
        int numberOfNalUnits = data.readUnsignedShort();
        for (int j = 0; j < numberOfNalUnits; j++) {
          int nalUnitLength = data.readUnsignedShort();
          // NOTE(review): the 4 here assumes NalUnitUtil.NAL_START_CODE.length == 4, matching the
          // copy in the second pass — TODO confirm.
          csdLength += 4 + nalUnitLength; // Start code and NAL unit.
          data.skipBytes(nalUnitLength);
        }
      }

      // Second pass: rewind and concatenate the codec-specific data into a single buffer,
      // prefixing each NAL unit with a start code.
      data.setPosition(csdStartPosition);
      byte[] buffer = new byte[csdLength];
      int bufferPosition = 0;
      for (int i = 0; i < numberOfArrays; i++) {
        data.skipBytes(1); // array_completeness (1), reserved (1), nal_unit_type (6)
        int numberOfNalUnits = data.readUnsignedShort();
        for (int j = 0; j < numberOfNalUnits; j++) {
          int nalUnitLength = data.readUnsignedShort();
          System.arraycopy(NalUnitUtil.NAL_START_CODE, 0, buffer, bufferPosition,
              NalUnitUtil.NAL_START_CODE.length);
          bufferPosition += NalUnitUtil.NAL_START_CODE.length;
          System
              .arraycopy(data.data, data.getPosition(), buffer, bufferPosition, nalUnitLength);
          bufferPosition += nalUnitLength;
          data.skipBytes(nalUnitLength);
        }
      }

      List<byte[]> initializationData = csdLength == 0 ? null : Collections.singletonList(buffer);
      return new HevcConfig(initializationData, lengthSizeMinusOne + 1);
    } catch (ArrayIndexOutOfBoundsException e) {
      // Truncated or malformed configuration data surfaces here from the reads above.
      throw new ParserException("Error parsing HEVC config", e);
    }
  }

  private HevcConfig(@Nullable List<byte[]> initializationData, int nalUnitLengthFieldLength) {
    this.initializationData = initializationData;
    this.nalUnitLengthFieldLength = nalUnitLengthFieldLength;
  }

}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java
new file mode 100644
index 0000000000..1627b70a28
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/MediaCodecVideoRenderer.java
@@ -0,0 +1,1873 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.Point;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo.CodecProfileLevel;
+import android.media.MediaCrypto;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+import android.util.Pair;
+import android.view.Surface;
+import androidx.annotation.CallSuper;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlayer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.FormatHolder;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.PlayerMessage.Target;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.RendererCapabilities;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmInitData;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.mediacodec.MediaFormatUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.source.MediaSource;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Log;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.TraceUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Decodes and renders video using {@link MediaCodec}.
+ *
+ * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)}
+ * on the playback thread:
+ *
+ * <ul>
+ * <li>Message with type {@link C#MSG_SET_SURFACE} to set the output surface. The message payload
+ * should be the target {@link Surface}, or null.
+ * <li>Message with type {@link C#MSG_SET_SCALING_MODE} to set the video scaling mode. The message
+ * payload should be one of the integer scaling modes in {@link C.VideoScalingMode}. Note that
+ * the scaling mode only applies if the {@link Surface} targeted by this renderer is owned by
+ * a {@link android.view.SurfaceView}.
+ * </ul>
+ */
+public class MediaCodecVideoRenderer extends MediaCodecRenderer {
+
+  private static final String TAG = "MediaCodecVideoRenderer";
+  // Keys of the crop rectangle that some decoders report in their output MediaFormat; used in
+  // onOutputFormatChanged to compute the visible width/height.
+  private static final String KEY_CROP_LEFT = "crop-left";
+  private static final String KEY_CROP_RIGHT = "crop-right";
+  private static final String KEY_CROP_BOTTOM = "crop-bottom";
+  private static final String KEY_CROP_TOP = "crop-top";
+
+  // Long edge length in pixels for standard video formats, in decreasing in order.
+  private static final int[] STANDARD_LONG_EDGE_VIDEO_PX = new int[] {
+      1920, 1600, 1440, 1280, 960, 854, 640, 540, 480};
+
+  // Generally there is zero or one pending output stream offset. We track more offsets to allow for
+  // pending output streams that have fewer frames than the codec latency.
+  private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10;
+  /**
+   * Scale factor for the initial maximum input size used to configure the codec in non-adaptive
+   * playbacks. See {@link #getCodecMaxValues(MediaCodecInfo, Format, Format[])}.
+   */
+  private static final float INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR = 1.5f;
+
+  /** Magic frame render timestamp that indicates the EOS in tunneling mode. */
+  private static final long TUNNELING_EOS_PRESENTATION_TIME_US = Long.MAX_VALUE;
+
+  /**
+   * A {@link DecoderException} that additionally captures the state of the output {@link Surface}
+   * at the moment the failure happened, to aid debugging of surface-related decode errors.
+   */
+  public static final class VideoDecoderException extends DecoderException {
+
+    /** The {@link System#identityHashCode(Object)} of the surface when the exception occurred. */
+    public final int surfaceIdentityHashCode;
+
+    /** Whether the surface was valid when the exception occurred. */
+    public final boolean isSurfaceValid;
+
+    public VideoDecoderException(
+        Throwable cause, @Nullable MediaCodecInfo codecInfo, @Nullable Surface surface) {
+      super(cause, codecInfo);
+      // A null surface is treated as "valid" so the flag only signals genuinely broken surfaces.
+      isSurfaceValid = surface == null || surface.isValid();
+      surfaceIdentityHashCode = System.identityHashCode(surface);
+    }
+  }
+
+  // Cached result of the static setOutputSurface-workaround evaluation, computed once per process.
+  private static boolean evaluatedDeviceNeedsSetOutputSurfaceWorkaround;
+  private static boolean deviceNeedsSetOutputSurfaceWorkaround;
+
+  private final Context context;
+  private final VideoFrameReleaseTimeHelper frameReleaseTimeHelper;
+  private final EventDispatcher eventDispatcher;
+  private final long allowedJoiningTimeMs;
+  private final int maxDroppedFramesToNotify;
+  private final boolean deviceNeedsNoPostProcessWorkaround;
+  private final long[] pendingOutputStreamOffsetsUs;
+  private final long[] pendingOutputStreamSwitchTimesUs;
+
+  // Per-codec state, refreshed in configureCodec/onCodecInitialized.
+  private CodecMaxValues codecMaxValues;
+  private boolean codecNeedsSetOutputSurfaceWorkaround;
+  private boolean codecHandlesHdr10PlusOutOfBandMetadata;
+
+  // Output surface state. dummySurface is used when the caller has not supplied a surface.
+  private Surface surface;
+  private Surface dummySurface;
+  @C.VideoScalingMode
+  private int scalingMode;
+  private boolean renderedFirstFrame;
+  private long initialPositionUs;
+  private long joiningDeadlineMs;
+  private long droppedFrameAccumulationStartTimeMs;
+  private int droppedFrames;
+  private int consecutiveDroppedFrameCount;
+  private int buffersInCodecCount;
+  private long lastRenderTimeUs;
+
+  // "pending" values come from the input format; "current" from the codec output format;
+  // "reported" is what was last dispatched to the event listener.
+  private int pendingRotationDegrees;
+  private float pendingPixelWidthHeightRatio;
+  @Nullable private MediaFormat currentMediaFormat;
+  private int currentWidth;
+  private int currentHeight;
+  private int currentUnappliedRotationDegrees;
+  private float currentPixelWidthHeightRatio;
+  private int reportedWidth;
+  private int reportedHeight;
+  private int reportedUnappliedRotationDegrees;
+  private float reportedPixelWidthHeightRatio;
+
+  // Tunneling state, set in onEnabled from the renderer configuration.
+  private boolean tunneling;
+  private int tunnelingAudioSessionId;
+  /* package */ @Nullable OnFrameRenderedListenerV23 tunnelingOnFrameRenderedListener;
+
+  private long lastInputTimeUs;
+  private long outputStreamOffsetUs;
+  private int pendingOutputStreamOffsetCount;
+  @Nullable private VideoFrameMetadataListener frameMetadataListener;
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ */
+  public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
+    // Delegates with no joining time allowed and no event listener.
+    this(context, mediaCodecSelector, 0);
+  }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ */
+  public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector,
+      long allowedJoiningTimeMs) {
+    // Delegates with event delivery disabled.
+    this(
+        context,
+        mediaCodecSelector,
+        allowedJoiningTimeMs,
+        /* eventHandler= */ null,
+        /* eventListener= */ null,
+        /* maxDroppedFramesToNotify= */ -1);
+  }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+ */
+  @SuppressWarnings("deprecation")
+  public MediaCodecVideoRenderer(
+      Context context,
+      MediaCodecSelector mediaCodecSelector,
+      long allowedJoiningTimeMs,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify) {
+    // Delegates with DRM disabled (null session manager).
+    this(
+        context,
+        mediaCodecSelector,
+        allowedJoiningTimeMs,
+        /* drmSessionManager= */ null,
+        /* playClearSamplesWithoutKeys= */ false,
+        eventHandler,
+        eventListener,
+        maxDroppedFramesToNotify);
+  }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
+ * content is not required.
+ * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+ * For example a media file may start with a short clear region so as to allow playback to
+ * begin in parallel with key acquisition. This parameter specifies whether the renderer is
+ * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+ * has obtained the keys necessary to decrypt encrypted regions of the media.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+ * @deprecated Use {@link #MediaCodecVideoRenderer(Context, MediaCodecSelector, long, boolean,
+ * Handler, VideoRendererEventListener, int)} instead, and pass DRM-related parameters to the
+ * {@link MediaSource} factories.
+ */
+  @Deprecated
+  @SuppressWarnings("deprecation")
+  public MediaCodecVideoRenderer(
+      Context context,
+      MediaCodecSelector mediaCodecSelector,
+      long allowedJoiningTimeMs,
+      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+      boolean playClearSamplesWithoutKeys,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify) {
+    // Delegates with decoder fallback disabled.
+    this(
+        context,
+        mediaCodecSelector,
+        allowedJoiningTimeMs,
+        drmSessionManager,
+        playClearSamplesWithoutKeys,
+        /* enableDecoderFallback= */ false,
+        eventHandler,
+        eventListener,
+        maxDroppedFramesToNotify);
+  }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
+ * initialization fails. This may result in using a decoder that is slower/less efficient than
+ * the primary decoder.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+ */
+  @SuppressWarnings("deprecation")
+  public MediaCodecVideoRenderer(
+      Context context,
+      MediaCodecSelector mediaCodecSelector,
+      long allowedJoiningTimeMs,
+      boolean enableDecoderFallback,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify) {
+    // Delegates with DRM disabled (null session manager).
+    this(
+        context,
+        mediaCodecSelector,
+        allowedJoiningTimeMs,
+        /* drmSessionManager= */ null,
+        /* playClearSamplesWithoutKeys= */ false,
+        enableDecoderFallback,
+        eventHandler,
+        eventListener,
+        maxDroppedFramesToNotify);
+  }
+
+ /**
+ * @param context A context.
+ * @param mediaCodecSelector A decoder selector.
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
+ * content is not required.
+ * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+ * For example a media file may start with a short clear region so as to allow playback to
+ * begin in parallel with key acquisition. This parameter specifies whether the renderer is
+ * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+ * has obtained the keys necessary to decrypt encrypted regions of the media.
+ * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
+ * initialization fails. This may result in using a decoder that is slower/less efficient than
+ * the primary decoder.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+ * @deprecated Use {@link #MediaCodecVideoRenderer(Context, MediaCodecSelector, long, boolean,
+ * Handler, VideoRendererEventListener, int)} instead, and pass DRM-related parameters to the
+ * {@link MediaSource} factories.
+ */
+  @Deprecated
+  public MediaCodecVideoRenderer(
+      Context context,
+      MediaCodecSelector mediaCodecSelector,
+      long allowedJoiningTimeMs,
+      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+      boolean playClearSamplesWithoutKeys,
+      boolean enableDecoderFallback,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify) {
+    super(
+        C.TRACK_TYPE_VIDEO,
+        mediaCodecSelector,
+        drmSessionManager,
+        playClearSamplesWithoutKeys,
+        enableDecoderFallback,
+        /* assumedMinimumCodecOperatingRate= */ 30);
+    this.allowedJoiningTimeMs = allowedJoiningTimeMs;
+    this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
+    // Use the application context to avoid retaining a shorter-lived context.
+    this.context = context.getApplicationContext();
+    frameReleaseTimeHelper = new VideoFrameReleaseTimeHelper(this.context);
+    eventDispatcher = new EventDispatcher(eventHandler, eventListener);
+    deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround();
+    pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT];
+    pendingOutputStreamSwitchTimesUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT];
+    // Sentinel values: unknown until streams/formats arrive.
+    outputStreamOffsetUs = C.TIME_UNSET;
+    lastInputTimeUs = C.TIME_UNSET;
+    joiningDeadlineMs = C.TIME_UNSET;
+    currentWidth = Format.NO_VALUE;
+    currentHeight = Format.NO_VALUE;
+    currentPixelWidthHeightRatio = Format.NO_VALUE;
+    pendingPixelWidthHeightRatio = Format.NO_VALUE;
+    scalingMode = C.VIDEO_SCALING_MODE_DEFAULT;
+    clearReportedVideoSize();
+  }
+
+  /**
+   * Reports this renderer's support for {@code format}: format support, adaptive support and
+   * tunneling support, each derived from the decoders returned by {@code mediaCodecSelector}.
+   */
+  @Override
+  @Capabilities
+  protected int supportsFormat(
+      MediaCodecSelector mediaCodecSelector,
+      @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+      Format format)
+      throws DecoderQueryException {
+    String mimeType = format.sampleMimeType;
+    if (!MimeTypes.isVideo(mimeType)) {
+      return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
+    }
+    @Nullable DrmInitData drmInitData = format.drmInitData;
+    // Assume encrypted content requires secure decoders.
+    boolean requiresSecureDecryption = drmInitData != null;
+    List<MediaCodecInfo> decoderInfos =
+        getDecoderInfos(
+            mediaCodecSelector,
+            format,
+            requiresSecureDecryption,
+            /* requiresTunnelingDecoder= */ false);
+    if (requiresSecureDecryption && decoderInfos.isEmpty()) {
+      // No secure decoders are available. Fall back to non-secure decoders.
+      decoderInfos =
+          getDecoderInfos(
+              mediaCodecSelector,
+              format,
+              /* requiresSecureDecoder= */ false,
+              /* requiresTunnelingDecoder= */ false);
+    }
+    if (decoderInfos.isEmpty()) {
+      return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE);
+    }
+    boolean supportsFormatDrm =
+        drmInitData == null
+            || FrameworkMediaCrypto.class.equals(format.exoMediaCryptoType)
+            || (format.exoMediaCryptoType == null
+                && supportsFormatDrm(drmSessionManager, drmInitData));
+    if (!supportsFormatDrm) {
+      return RendererCapabilities.create(FORMAT_UNSUPPORTED_DRM);
+    }
+    // Check capabilities for the first decoder in the list, which takes priority.
+    MediaCodecInfo decoderInfo = decoderInfos.get(0);
+    boolean isFormatSupported = decoderInfo.isFormatSupported(format);
+    @AdaptiveSupport
+    int adaptiveSupport =
+        decoderInfo.isSeamlessAdaptationSupported(format)
+            ? ADAPTIVE_SEAMLESS
+            : ADAPTIVE_NOT_SEAMLESS;
+    @TunnelingSupport int tunnelingSupport = TUNNELING_NOT_SUPPORTED;
+    if (isFormatSupported) {
+      // Tunneling is only advertised if a tunneling decoder supports the format seamlessly.
+      List<MediaCodecInfo> tunnelingDecoderInfos =
+          getDecoderInfos(
+              mediaCodecSelector,
+              format,
+              requiresSecureDecryption,
+              /* requiresTunnelingDecoder= */ true);
+      if (!tunnelingDecoderInfos.isEmpty()) {
+        MediaCodecInfo tunnelingDecoderInfo = tunnelingDecoderInfos.get(0);
+        if (tunnelingDecoderInfo.isFormatSupported(format)
+            && tunnelingDecoderInfo.isSeamlessAdaptationSupported(format)) {
+          tunnelingSupport = TUNNELING_SUPPORTED;
+        }
+      }
+    }
+    @FormatSupport
+    int formatSupport = isFormatSupported ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES;
+    return RendererCapabilities.create(formatSupport, adaptiveSupport, tunnelingSupport);
+  }
+
+  /** Returns candidate decoders, passing through this renderer's current tunneling state. */
+  @Override
+  protected List<MediaCodecInfo> getDecoderInfos(
+      MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder)
+      throws DecoderQueryException {
+    return getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder, tunneling);
+  }
+
+  /**
+   * Returns decoders for {@code format} sorted by format support, appending H.265/H.264 decoders
+   * as fallbacks for the Dolby Vision profiles that are backward compatible with those codecs.
+   *
+   * @return An unmodifiable list of candidate decoders, empty if the sample MIME type is null.
+   */
+  private static List<MediaCodecInfo> getDecoderInfos(
+      MediaCodecSelector mediaCodecSelector,
+      Format format,
+      boolean requiresSecureDecoder,
+      boolean requiresTunnelingDecoder)
+      throws DecoderQueryException {
+    @Nullable String mimeType = format.sampleMimeType;
+    if (mimeType == null) {
+      return Collections.emptyList();
+    }
+    List<MediaCodecInfo> decoderInfos =
+        mediaCodecSelector.getDecoderInfos(
+            mimeType, requiresSecureDecoder, requiresTunnelingDecoder);
+    decoderInfos = MediaCodecUtil.getDecoderInfosSortedByFormatSupport(decoderInfos, format);
+    if (MimeTypes.VIDEO_DOLBY_VISION.equals(mimeType)) {
+      // Fall back to H.264/AVC or H.265/HEVC for the relevant DV profiles.
+      @Nullable
+      Pair<Integer, Integer> codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
+      if (codecProfileAndLevel != null) {
+        int profile = codecProfileAndLevel.first;
+        if (profile == CodecProfileLevel.DolbyVisionProfileDvheDtr
+            || profile == CodecProfileLevel.DolbyVisionProfileDvheSt) {
+          decoderInfos.addAll(
+              mediaCodecSelector.getDecoderInfos(
+                  MimeTypes.VIDEO_H265, requiresSecureDecoder, requiresTunnelingDecoder));
+        } else if (profile == CodecProfileLevel.DolbyVisionProfileDvavSe) {
+          decoderInfos.addAll(
+              mediaCodecSelector.getDecoderInfos(
+                  MimeTypes.VIDEO_H264, requiresSecureDecoder, requiresTunnelingDecoder));
+        }
+      }
+    }
+    return Collections.unmodifiableList(decoderInfos);
+  }
+
+  @Override
+  protected void onEnabled(boolean joining) throws ExoPlaybackException {
+    super.onEnabled(joining);
+    int oldTunnelingAudioSessionId = tunnelingAudioSessionId;
+    tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
+    tunneling = tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET;
+    if (tunnelingAudioSessionId != oldTunnelingAudioSessionId) {
+      // The tunneling session changed, so any existing codec was configured for the wrong session.
+      releaseCodec();
+    }
+    eventDispatcher.enabled(decoderCounters);
+    frameReleaseTimeHelper.enable();
+  }
+
+  @Override
+  protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
+    if (outputStreamOffsetUs == C.TIME_UNSET) {
+      // First stream: apply the offset immediately.
+      outputStreamOffsetUs = offsetUs;
+    } else {
+      if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) {
+        // The pending queue is full; overwrite the most recent entry rather than growing it.
+        Log.w(TAG, "Too many stream changes, so dropping offset: "
+            + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]);
+      } else {
+        pendingOutputStreamOffsetCount++;
+      }
+      pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs;
+      // Record the last input time so the offset can be applied once output passes that point.
+      pendingOutputStreamSwitchTimesUs[pendingOutputStreamOffsetCount - 1] = lastInputTimeUs;
+    }
+    super.onStreamChanged(formats, offsetUs);
+  }
+
+  @Override
+  protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
+    super.onPositionReset(positionUs, joining);
+    clearRenderedFirstFrame();
+    initialPositionUs = C.TIME_UNSET;
+    consecutiveDroppedFrameCount = 0;
+    lastInputTimeUs = C.TIME_UNSET;
+    if (pendingOutputStreamOffsetCount != 0) {
+      // A seek discards queued output, so jump straight to the most recent pending offset.
+      outputStreamOffsetUs = pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1];
+      pendingOutputStreamOffsetCount = 0;
+    }
+    if (joining) {
+      setJoiningDeadlineMs();
+    } else {
+      joiningDeadlineMs = C.TIME_UNSET;
+    }
+  }
+
+  /**
+   * Reports readiness. While joining an ongoing playback, the renderer reports ready until the
+   * joining deadline elapses even if it has not yet rendered a frame to the new surface.
+   */
+  @Override
+  public boolean isReady() {
+    if (super.isReady()
+        && (renderedFirstFrame
+            || (dummySurface != null && surface == dummySurface)
+            || getCodec() == null
+            || tunneling)) {
+      // Ready. If we were joining then we've now joined, so clear the joining deadline.
+      joiningDeadlineMs = C.TIME_UNSET;
+      return true;
+    }
+    if (joiningDeadlineMs == C.TIME_UNSET) {
+      // Not joining.
+      return false;
+    }
+    if (SystemClock.elapsedRealtime() < joiningDeadlineMs) {
+      // Joining and still within the joining deadline.
+      return true;
+    }
+    // The joining deadline has been exceeded. Give up and clear the deadline.
+    joiningDeadlineMs = C.TIME_UNSET;
+    return false;
+  }
+
+  @Override
+  protected void onStarted() {
+    super.onStarted();
+    // Restart dropped-frame accounting for the new playing period.
+    droppedFrames = 0;
+    droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
+    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
+  }
+
+  @Override
+  protected void onStopped() {
+    joiningDeadlineMs = C.TIME_UNSET;
+    // Flush any accumulated dropped-frame count to the listener before stopping.
+    maybeNotifyDroppedFrames();
+    super.onStopped();
+  }
+
+  @Override
+  protected void onDisabled() {
+    lastInputTimeUs = C.TIME_UNSET;
+    outputStreamOffsetUs = C.TIME_UNSET;
+    pendingOutputStreamOffsetCount = 0;
+    currentMediaFormat = null;
+    clearReportedVideoSize();
+    clearRenderedFirstFrame();
+    frameReleaseTimeHelper.disable();
+    tunnelingOnFrameRenderedListener = null;
+    try {
+      super.onDisabled();
+    } finally {
+      // Always notify the listener, even if super.onDisabled() throws.
+      eventDispatcher.disabled(decoderCounters);
+    }
+  }
+
+  @Override
+  protected void onReset() {
+    try {
+      super.onReset();
+    } finally {
+      // Release the dummy surface even if super.onReset() throws, to avoid leaking it.
+      if (dummySurface != null) {
+        if (surface == dummySurface) {
+          surface = null;
+        }
+        dummySurface.release();
+        dummySurface = null;
+      }
+    }
+  }
+
+ @Override
+ public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException {
+ if (messageType == C.MSG_SET_SURFACE) {
+ setSurface((Surface) message);
+ } else if (messageType == C.MSG_SET_SCALING_MODE) {
+ scalingMode = (Integer) message;
+ MediaCodec codec = getCodec();
+ if (codec != null) {
+ codec.setVideoScalingMode(scalingMode);
+ }
+ } else if (messageType == C.MSG_SET_VIDEO_FRAME_METADATA_LISTENER) {
+ frameMetadataListener = (VideoFrameMetadataListener) message;
+ } else {
+ super.handleMessage(messageType, message);
+ }
+ }
+
+  /**
+   * Sets the output surface. A null surface is replaced with a {@link DummySurface} when the
+   * current codec supports one. On API 23+ the surface is swapped on the live codec when safe;
+   * otherwise the codec is released and re-initialized.
+   */
+  private void setSurface(Surface surface) throws ExoPlaybackException {
+    if (surface == null) {
+      // Use a dummy surface if possible.
+      if (dummySurface != null) {
+        surface = dummySurface;
+      } else {
+        MediaCodecInfo codecInfo = getCodecInfo();
+        if (codecInfo != null && shouldUseDummySurface(codecInfo)) {
+          dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure);
+          surface = dummySurface;
+        }
+      }
+    }
+    // We only need to update the codec if the surface has changed.
+    if (this.surface != surface) {
+      this.surface = surface;
+      @State int state = getState();
+      MediaCodec codec = getCodec();
+      if (codec != null) {
+        if (Util.SDK_INT >= 23 && surface != null && !codecNeedsSetOutputSurfaceWorkaround) {
+          setOutputSurfaceV23(codec, surface);
+        } else {
+          // Either the platform or this codec can't swap surfaces in place; recreate the codec.
+          releaseCodec();
+          maybeInitCodec();
+        }
+      }
+      if (surface != null && surface != dummySurface) {
+        // If we know the video size, report it again immediately.
+        maybeRenotifyVideoSizeChanged();
+        // We haven't rendered to the new surface yet.
+        clearRenderedFirstFrame();
+        if (state == STATE_STARTED) {
+          setJoiningDeadlineMs();
+        }
+      } else {
+        // The surface has been removed.
+        clearReportedVideoSize();
+        clearRenderedFirstFrame();
+      }
+    } else if (surface != null && surface != dummySurface) {
+      // The surface is set and unchanged. If we know the video size and/or have already rendered to
+      // the surface, report these again immediately.
+      maybeRenotifyVideoSizeChanged();
+      maybeRenotifyRenderedFirstFrame();
+    }
+  }
+
+  /** Codec initialization requires an output surface, real or dummy. */
+  @Override
+  protected boolean shouldInitCodec(MediaCodecInfo codecInfo) {
+    if (surface != null) {
+      return true;
+    }
+    return shouldUseDummySurface(codecInfo);
+  }
+
+  @Override
+  protected boolean getCodecNeedsEosPropagation() {
+    if (!tunneling) {
+      return false;
+    }
+    // Since API 23, onFrameRenderedListener allows for detection of the renderer EOS, so
+    // propagation is only required on older API levels.
+    return Util.SDK_INT < 23;
+  }
+
+  /**
+   * Configures {@code codec} with the maximum values across stream formats and an output surface,
+   * creating a dummy surface when none has been set.
+   */
+  @Override
+  protected void configureCodec(
+      MediaCodecInfo codecInfo,
+      MediaCodec codec,
+      Format format,
+      @Nullable MediaCrypto crypto,
+      float codecOperatingRate) {
+    String codecMimeType = codecInfo.codecMimeType;
+    codecMaxValues = getCodecMaxValues(codecInfo, format, getStreamFormats());
+    MediaFormat mediaFormat =
+        getMediaFormat(
+            format,
+            codecMimeType,
+            codecMaxValues,
+            codecOperatingRate,
+            deviceNeedsNoPostProcessWorkaround,
+            tunnelingAudioSessionId);
+    if (surface == null) {
+      // shouldInitCodec guarantees a dummy surface is usable when no real surface is set.
+      Assertions.checkState(shouldUseDummySurface(codecInfo));
+      if (dummySurface == null) {
+        dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure);
+      }
+      surface = dummySurface;
+    }
+    codec.configure(mediaFormat, surface, crypto, 0);
+    if (Util.SDK_INT >= 23 && tunneling) {
+      tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec);
+    }
+  }
+
+  /**
+   * Decides whether the existing codec can be reused for {@code newFormat}: it must adapt
+   * seamlessly and the new format must fit within the configured maximum width/height/input size.
+   */
+  @Override
+  protected @KeepCodecResult int canKeepCodec(
+      MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) {
+    if (codecInfo.isSeamlessAdaptationSupported(
+            oldFormat, newFormat, /* isNewFormatComplete= */ true)
+        && newFormat.width <= codecMaxValues.width
+        && newFormat.height <= codecMaxValues.height
+        && getMaxInputSize(codecInfo, newFormat) <= codecMaxValues.inputSize) {
+      return oldFormat.initializationDataEquals(newFormat)
+          ? KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION
+          : KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION;
+    }
+    return KEEP_CODEC_RESULT_NO;
+  }
+
+  @CallSuper
+  @Override
+  protected void releaseCodec() {
+    try {
+      super.releaseCodec();
+    } finally {
+      // A released codec holds no buffers, so reset the count even if release throws.
+      buffersInCodecCount = 0;
+    }
+  }
+
+  @CallSuper
+  @Override
+  protected boolean flushOrReleaseCodec() {
+    try {
+      return super.flushOrReleaseCodec();
+    } finally {
+      // Flushing or releasing discards all queued buffers, so reset the count either way.
+      buffersInCodecCount = 0;
+    }
+  }
+
+  /**
+   * Returns the codec operating rate scaled by the highest frame-rate declared across all stream
+   * formats, or {@code CODEC_OPERATING_RATE_UNSET} when no stream declares a frame-rate.
+   */
+  @Override
+  protected float getCodecOperatingRateV23(
+      float operatingRate, Format format, Format[] streamFormats) {
+    // Use the highest known stream frame-rate up front, to avoid having to reconfigure the codec
+    // should an adaptive switch to that stream occur.
+    float highestFrameRate = -1;
+    for (Format candidate : streamFormats) {
+      float candidateFrameRate = candidate.frameRate;
+      if (candidateFrameRate != Format.NO_VALUE) {
+        highestFrameRate = Math.max(highestFrameRate, candidateFrameRate);
+      }
+    }
+    if (highestFrameRate == -1) {
+      return CODEC_OPERATING_RATE_UNSET;
+    }
+    return highestFrameRate * operatingRate;
+  }
+
+  @Override
+  protected void onCodecInitialized(String name, long initializedTimestampMs,
+      long initializationDurationMs) {
+    eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs);
+    // Refresh per-codec workaround/capability flags for the newly initialized decoder.
+    codecNeedsSetOutputSurfaceWorkaround = codecNeedsSetOutputSurfaceWorkaround(name);
+    codecHandlesHdr10PlusOutOfBandMetadata =
+        Assertions.checkNotNull(getCodecInfo()).isHdr10PlusOutOfBandMetadataSupported();
+  }
+
+  @Override
+  protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
+    super.onInputFormatChanged(formatHolder);
+    Format newFormat = formatHolder.format;
+    eventDispatcher.inputFormatChanged(newFormat);
+    // Stash aspect-ratio/rotation from the input format; applied when the output format changes.
+    pendingPixelWidthHeightRatio = newFormat.pixelWidthHeightRatio;
+    pendingRotationDegrees = newFormat.rotationDegrees;
+  }
+
+  /**
+   * Called immediately before an input buffer is queued into the codec.
+   *
+   * @param buffer The buffer to be queued.
+   */
+  @CallSuper
+  @Override
+  protected void onQueueInputBuffer(DecoderInputBuffer buffer) {
+    // In tunneling mode the device may do frame rate conversion, so in general we can't keep track
+    // of the number of buffers in the codec.
+    if (!tunneling) {
+      buffersInCodecCount++;
+    }
+    // Track the largest input timestamp seen; buffers may arrive out of presentation order.
+    lastInputTimeUs = Math.max(buffer.timeUs, lastInputTimeUs);
+    if (Util.SDK_INT < 23 && tunneling) {
+      // In tunneled mode before API 23 we don't have a way to know when the buffer is output, so
+      // treat it as if it were output immediately.
+      onProcessedTunneledBuffer(buffer.timeUs);
+    }
+  }
+
+  /**
+   * Derives the visible video dimensions from the codec output format, preferring the crop
+   * rectangle when the codec reports one, and forwards them for processing.
+   */
+  @Override
+  protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) {
+    currentMediaFormat = outputMediaFormat;
+    boolean hasCrop =
+        outputMediaFormat.containsKey(KEY_CROP_RIGHT)
+            && outputMediaFormat.containsKey(KEY_CROP_LEFT)
+            && outputMediaFormat.containsKey(KEY_CROP_BOTTOM)
+            && outputMediaFormat.containsKey(KEY_CROP_TOP);
+    int width;
+    int height;
+    if (hasCrop) {
+      // Crop bounds are inclusive, hence the +1 on each dimension.
+      width =
+          outputMediaFormat.getInteger(KEY_CROP_RIGHT)
+              - outputMediaFormat.getInteger(KEY_CROP_LEFT)
+              + 1;
+      height =
+          outputMediaFormat.getInteger(KEY_CROP_BOTTOM)
+              - outputMediaFormat.getInteger(KEY_CROP_TOP)
+              + 1;
+    } else {
+      width = outputMediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+      height = outputMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+    }
+    processOutputFormat(codec, width, height);
+  }
+
+  /**
+   * Inspects supplemental data for HDR10+ out-of-band metadata and, when found, forwards it to the
+   * codec. No-op unless the current codec supports out-of-band HDR10+ metadata.
+   */
+  @Override
+  protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer)
+      throws ExoPlaybackException {
+    if (!codecHandlesHdr10PlusOutOfBandMetadata) {
+      return;
+    }
+    ByteBuffer data = Assertions.checkNotNull(buffer.supplementalData);
+    if (data.remaining() >= 7) {
+      // Check for HDR10+ out-of-band metadata. See User_data_registered_itu_t_t35 in ST 2094-40.
+      byte ituTT35CountryCode = data.get();
+      int ituTT35TerminalProviderCode = data.getShort();
+      int ituTT35TerminalProviderOrientedCode = data.getShort();
+      byte applicationIdentifier = data.get();
+      byte applicationVersion = data.get();
+      // Rewind so the full payload (including the header just read) can be copied below.
+      data.position(0);
+      if (ituTT35CountryCode == (byte) 0xB5
+          && ituTT35TerminalProviderCode == 0x003C
+          && ituTT35TerminalProviderOrientedCode == 0x0001
+          && applicationIdentifier == 4
+          && applicationVersion == 0) {
+        // The metadata size may vary so allocate a new array every time. This is not too
+        // inefficient because the metadata is only a few tens of bytes.
+        byte[] hdr10PlusInfo = new byte[data.remaining()];
+        data.get(hdr10PlusInfo);
+        data.position(0);
+        // If codecHandlesHdr10PlusOutOfBandMetadata is true, this is an API 29 or later build.
+        setHdr10PlusInfoV29(getCodec(), hdr10PlusInfo);
+      }
+    }
+  }
+
  /**
   * Processes the output buffer at {@code bufferIndex}, deciding whether to skip it, drop it,
   * render it immediately, hand it to the platform for timed release (API 21+), sleep-and-render
   * (pre-21), or leave it for a later iteration of the rendering loop.
   *
   * @param positionUs The current playback position, in microseconds.
   * @param elapsedRealtimeUs Elapsed realtime in microseconds, measured at the start of the
   *     current iteration of the rendering loop.
   * @param codec The codec that owns the output buffer.
   * @param buffer The output buffer.
   * @param bufferIndex The index of the output buffer.
   * @param bufferFlags The flags attached to the output buffer.
   * @param bufferPresentationTimeUs The presentation time of the output buffer, in microseconds.
   * @param isDecodeOnlyBuffer Whether the buffer is marked as decode-only.
   * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
   * @param format The {@link Format} associated with the buffer.
   * @return Whether the buffer was fully processed (skipped, dropped or rendered).
   * @throws ExoPlaybackException If an error occurs dropping buffers to a keyframe.
   */
  @Override
  protected boolean processOutputBuffer(
      long positionUs,
      long elapsedRealtimeUs,
      MediaCodec codec,
      ByteBuffer buffer,
      int bufferIndex,
      int bufferFlags,
      long bufferPresentationTimeUs,
      boolean isDecodeOnlyBuffer,
      boolean isLastBuffer,
      Format format)
      throws ExoPlaybackException {
    if (initialPositionUs == C.TIME_UNSET) {
      initialPositionUs = positionUs;
    }

    // Media-relative presentation time (see getOutputStreamOffsetUs()).
    long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs;

    if (isDecodeOnlyBuffer && !isLastBuffer) {
      skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
      return true;
    }

    // Positive if the buffer is early; negative if it's late.
    long earlyUs = bufferPresentationTimeUs - positionUs;
    if (surface == dummySurface) {
      // Skip frames in sync with playback, so we'll be at the right frame if the mode changes.
      if (isBufferLate(earlyUs)) {
        skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
        return true;
      }
      return false;
    }

    long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000;
    long elapsedSinceLastRenderUs = elapsedRealtimeNowUs - lastRenderTimeUs;
    boolean isStarted = getState() == STATE_STARTED;
    // Don't force output until we joined and the position reached the current stream.
    boolean forceRenderOutputBuffer =
        joiningDeadlineMs == C.TIME_UNSET
            && positionUs >= outputStreamOffsetUs
            && (!renderedFirstFrame
                || (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
    if (forceRenderOutputBuffer) {
      long releaseTimeNs = System.nanoTime();
      notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format, currentMediaFormat);
      if (Util.SDK_INT >= 21) {
        renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
      } else {
        renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
      }
      return true;
    }

    if (!isStarted || positionUs == initialPositionUs) {
      return false;
    }

    // Fine-grained adjustment of earlyUs based on the elapsed time since the start of the current
    // iteration of the rendering loop.
    long elapsedSinceStartOfLoopUs = elapsedRealtimeNowUs - elapsedRealtimeUs;
    earlyUs -= elapsedSinceStartOfLoopUs;

    // Compute the buffer's desired release time in nanoseconds.
    long systemTimeNs = System.nanoTime();
    long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);

    // Apply a timestamp adjustment, if there is one.
    long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
        bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs);
    earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;

    // While joining, report drops as skips so they don't count against playback quality.
    boolean treatDroppedBuffersAsSkipped = joiningDeadlineMs != C.TIME_UNSET;
    if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastBuffer)
        && maybeDropBuffersToKeyframe(
            codec, bufferIndex, presentationTimeUs, positionUs, treatDroppedBuffersAsSkipped)) {
      return false;
    } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastBuffer)) {
      if (treatDroppedBuffersAsSkipped) {
        skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
      } else {
        dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
      }
      return true;
    }

    if (Util.SDK_INT >= 21) {
      // Let the underlying framework time the release.
      if (earlyUs < 50000) {
        notifyFrameMetadataListener(
            presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
        renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
        return true;
      }
    } else {
      // We need to time the release ourselves.
      if (earlyUs < 30000) {
        if (earlyUs > 11000) {
          // We're a little too early to render the frame. Sleep until the frame can be rendered.
          // Note: The 11ms threshold was chosen fairly arbitrarily.
          try {
            // Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
            Thread.sleep((earlyUs - 10000) / 1000);
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
          }
        }
        notifyFrameMetadataListener(
            presentationTimeUs, adjustedReleaseTimeNs, format, currentMediaFormat);
        renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
        return true;
      }
    }

    // We're either not playing, or it's not time to render the frame yet.
    return false;
  }
+
  /**
   * Applies a new decoded video size to the current output state, handling rotation on API 21+
   * (where the decoder applies rotation itself), and re-applies the video scaling mode to the
   * codec.
   *
   * @param codec The codec whose output format changed.
   * @param width The decoded width, in pixels.
   * @param height The decoded height, in pixels.
   */
  private void processOutputFormat(MediaCodec codec, int width, int height) {
    currentWidth = width;
    currentHeight = height;
    currentPixelWidthHeightRatio = pendingPixelWidthHeightRatio;
    if (Util.SDK_INT >= 21) {
      // On API level 21 and above the decoder applies the rotation when rendering to the surface.
      // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
      // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
      if (pendingRotationDegrees == 90 || pendingRotationDegrees == 270) {
        int rotatedHeight = currentWidth;
        currentWidth = currentHeight;
        currentHeight = rotatedHeight;
        currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio;
      }
    } else {
      // On API level 20 and below the decoder does not apply the rotation.
      currentUnappliedRotationDegrees = pendingRotationDegrees;
    }
    // Must be applied each time the output MediaFormat changes.
    codec.setVideoScalingMode(scalingMode);
  }
+
+ private void notifyFrameMetadataListener(
+ long presentationTimeUs, long releaseTimeNs, Format format, MediaFormat mediaFormat) {
+ if (frameMetadataListener != null) {
+ frameMetadataListener.onVideoFrameAboutToBeRendered(
+ presentationTimeUs, releaseTimeNs, format, mediaFormat);
+ }
+ }
+
  /**
   * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link
   * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, boolean,
   * Format)} to get the playback position with respect to the media.
   *
   * @return The current output stream offset, in microseconds.
   */
  protected long getOutputStreamOffsetUs() {
    return outputStreamOffsetUs;
  }
+
  /**
   * Called when a buffer was processed in tunneling mode. Updates the output format/size state for
   * the buffer's timestamp, counts the buffer as rendered, and triggers first-frame notification.
   *
   * @param presentationTimeUs The presentation time of the processed buffer, in microseconds.
   */
  protected void onProcessedTunneledBuffer(long presentationTimeUs) {
    @Nullable Format format = updateOutputFormatForTime(presentationTimeUs);
    if (format != null) {
      processOutputFormat(getCodec(), format.width, format.height);
    }
    maybeNotifyVideoSizeChanged();
    // In tunneling mode the platform renders; we still account for the frame here.
    decoderCounters.renderedOutputBufferCount++;
    maybeNotifyRenderedFirstFrame();
    onProcessedOutputBuffer(presentationTimeUs);
  }
+
  /** Called when an output EOS was received in tunneling mode. */
  private void onProcessedTunneledEndOfStream() {
    setPendingOutputEndOfStream();
  }
+
  /**
   * Called when an output buffer is successfully processed.
   *
   * <p>Decrements the in-codec buffer count (non-tunneled only) and consumes any pending output
   * stream offsets whose switch time has been reached, shifting the remaining pending entries to
   * the front of their arrays.
   *
   * @param presentationTimeUs The timestamp associated with the output buffer.
   */
  @CallSuper
  @Override
  protected void onProcessedOutputBuffer(long presentationTimeUs) {
    if (!tunneling) {
      buffersInCodecCount--;
    }
    // Apply every pending stream-offset switch whose time has passed, oldest first.
    while (pendingOutputStreamOffsetCount != 0
        && presentationTimeUs >= pendingOutputStreamSwitchTimesUs[0]) {
      outputStreamOffsetUs = pendingOutputStreamOffsetsUs[0];
      pendingOutputStreamOffsetCount--;
      // Shift both parallel arrays left by one to drop the consumed entry.
      System.arraycopy(
          pendingOutputStreamOffsetsUs,
          /* srcPos= */ 1,
          pendingOutputStreamOffsetsUs,
          /* destPos= */ 0,
          pendingOutputStreamOffsetCount);
      System.arraycopy(
          pendingOutputStreamSwitchTimesUs,
          /* srcPos= */ 1,
          pendingOutputStreamSwitchTimesUs,
          /* destPos= */ 0,
          pendingOutputStreamOffsetCount);
      // A new stream has started; re-arm the first-frame notification.
      clearRenderedFirstFrame();
    }
  }
+
+ /**
+ * Returns whether the buffer being processed should be dropped.
+ *
+ * @param earlyUs The time until the buffer should be presented in microseconds. A negative value
+ * indicates that the buffer is late.
+ * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+ * measured at the start of the current iteration of the rendering loop.
+ * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
+ */
+ protected boolean shouldDropOutputBuffer(
+ long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
+ return isBufferLate(earlyUs) && !isLastBuffer;
+ }
+
+ /**
+ * Returns whether to drop all buffers from the buffer being processed to the keyframe at or after
+ * the current playback position, if possible.
+ *
+ * @param earlyUs The time until the current buffer should be presented in microseconds. A
+ * negative value indicates that the buffer is late.
+ * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+ * measured at the start of the current iteration of the rendering loop.
+ * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
+ */
+ protected boolean shouldDropBuffersToKeyframe(
+ long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
+ return isBufferVeryLate(earlyUs) && !isLastBuffer;
+ }
+
+ /**
+ * Returns whether to force rendering an output buffer.
+ *
+ * @param earlyUs The time until the current buffer should be presented in microseconds. A
+ * negative value indicates that the buffer is late.
+ * @param elapsedSinceLastRenderUs The elapsed time since the last output buffer was rendered, in
+ * microseconds.
+ * @return Returns whether to force rendering an output buffer.
+ */
+ protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceLastRenderUs) {
+ // Force render late buffers every 100ms to avoid frozen video effect.
+ return isBufferLate(earlyUs) && elapsedSinceLastRenderUs > 100000;
+ }
+
  /**
   * Skips the output buffer with the specified index.
   *
   * @param codec The codec that owns the output buffer.
   * @param index The index of the output buffer to skip.
   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
   */
  protected void skipOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
    TraceUtil.beginSection("skipVideoBuffer");
    // false: release the buffer without rendering it to the surface.
    codec.releaseOutputBuffer(index, false);
    TraceUtil.endSection();
    decoderCounters.skippedOutputBufferCount++;
  }
+
  /**
   * Drops the output buffer with the specified index.
   *
   * @param codec The codec that owns the output buffer.
   * @param index The index of the output buffer to drop.
   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
   */
  protected void dropOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
    TraceUtil.beginSection("dropVideoBuffer");
    // false: release the buffer without rendering it to the surface.
    codec.releaseOutputBuffer(index, false);
    TraceUtil.endSection();
    updateDroppedBufferCounters(1);
  }
+
  /**
   * Drops frames from the current output buffer to the next keyframe at or before the playback
   * position. If no such keyframe exists, as the playback position is inside the same group of
   * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise.
   *
   * @param codec The codec that owns the output buffer.
   * @param index The index of the output buffer to drop.
   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
   * @param positionUs The current playback position, in microseconds.
   * @param treatDroppedBuffersAsSkipped Whether dropped buffers should be treated as intentionally
   *     skipped.
   * @return Whether any buffers were dropped.
   * @throws ExoPlaybackException If an error occurs flushing the codec.
   */
  protected boolean maybeDropBuffersToKeyframe(
      MediaCodec codec,
      int index,
      long presentationTimeUs,
      long positionUs,
      boolean treatDroppedBuffersAsSkipped)
      throws ExoPlaybackException {
    int droppedSourceBufferCount = skipSource(positionUs);
    if (droppedSourceBufferCount == 0) {
      // No keyframe at or before positionUs in the source; can't catch up this way.
      return false;
    }
    decoderCounters.droppedToKeyframeCount++;
    // We dropped some buffers to catch up, so update the decoder counters and flush the codec,
    // which releases all pending buffers including the current output buffer.
    int totalDroppedBufferCount = buffersInCodecCount + droppedSourceBufferCount;
    if (treatDroppedBuffersAsSkipped) {
      decoderCounters.skippedOutputBufferCount += totalDroppedBufferCount;
    } else {
      updateDroppedBufferCounters(totalDroppedBufferCount);
    }
    flushOrReinitializeCodec();
    return true;
  }
+
+ /**
+ * Updates decoder counters to reflect that {@code droppedBufferCount} additional buffers were
+ * dropped.
+ *
+ * @param droppedBufferCount The number of additional dropped buffers.
+ */
+ protected void updateDroppedBufferCounters(int droppedBufferCount) {
+ decoderCounters.droppedBufferCount += droppedBufferCount;
+ droppedFrames += droppedBufferCount;
+ consecutiveDroppedFrameCount += droppedBufferCount;
+ decoderCounters.maxConsecutiveDroppedBufferCount = Math.max(consecutiveDroppedFrameCount,
+ decoderCounters.maxConsecutiveDroppedBufferCount);
+ if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) {
+ maybeNotifyDroppedFrames();
+ }
+ }
+
  /**
   * Renders the output buffer with the specified index. This method is only called if the platform
   * API version of the device is less than 21.
   *
   * @param codec The codec that owns the output buffer.
   * @param index The index of the output buffer to render.
   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
   */
  protected void renderOutputBuffer(MediaCodec codec, int index, long presentationTimeUs) {
    maybeNotifyVideoSizeChanged();
    TraceUtil.beginSection("releaseOutputBuffer");
    // true: render the buffer to the surface as part of releasing it.
    codec.releaseOutputBuffer(index, true);
    TraceUtil.endSection();
    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
    decoderCounters.renderedOutputBufferCount++;
    consecutiveDroppedFrameCount = 0;
    maybeNotifyRenderedFirstFrame();
  }
+
  /**
   * Renders the output buffer with the specified index. This method is only called if the platform
   * API version of the device is 21 or later.
   *
   * @param codec The codec that owns the output buffer.
   * @param index The index of the output buffer to render.
   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
   * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
   */
  @TargetApi(21)
  protected void renderOutputBufferV21(
      MediaCodec codec, int index, long presentationTimeUs, long releaseTimeNs) {
    maybeNotifyVideoSizeChanged();
    TraceUtil.beginSection("releaseOutputBuffer");
    // The framework times the actual render against releaseTimeNs.
    codec.releaseOutputBuffer(index, releaseTimeNs);
    TraceUtil.endSection();
    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
    decoderCounters.renderedOutputBufferCount++;
    consecutiveDroppedFrameCount = 0;
    maybeNotifyRenderedFirstFrame();
  }
+
+ private boolean shouldUseDummySurface(MediaCodecInfo codecInfo) {
+ return Util.SDK_INT >= 23
+ && !tunneling
+ && !codecNeedsSetOutputSurfaceWorkaround(codecInfo.name)
+ && (!codecInfo.secure || DummySurface.isSecureSupported(context));
+ }
+
+ private void setJoiningDeadlineMs() {
+ joiningDeadlineMs = allowedJoiningTimeMs > 0
+ ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs) : C.TIME_UNSET;
+ }
+
  /**
   * Clears the rendered-first-frame flag so the next rendered frame triggers a notification, and
   * re-arms the tunneling frame-rendered listener where applicable.
   */
  private void clearRenderedFirstFrame() {
    renderedFirstFrame = false;
    // The first frame notification is triggered by renderOutputBuffer or renderOutputBufferV21 for
    // non-tunneled playback, onQueueInputBuffer for tunneled playback prior to API level 23, and
    // OnFrameRenderedListenerV23.onFrameRenderedListener for tunneled playback on API level 23 and
    // above.
    if (Util.SDK_INT >= 23 && tunneling) {
      MediaCodec codec = getCodec();
      // If codec is null then the listener will be instantiated in configureCodec.
      if (codec != null) {
        tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec);
      }
    }
  }
+
+ /* package */ void maybeNotifyRenderedFirstFrame() {
+ if (!renderedFirstFrame) {
+ renderedFirstFrame = true;
+ eventDispatcher.renderedFirstFrame(surface);
+ }
+ }
+
+ private void maybeRenotifyRenderedFirstFrame() {
+ if (renderedFirstFrame) {
+ eventDispatcher.renderedFirstFrame(surface);
+ }
+ }
+
+ private void clearReportedVideoSize() {
+ reportedWidth = Format.NO_VALUE;
+ reportedHeight = Format.NO_VALUE;
+ reportedPixelWidthHeightRatio = Format.NO_VALUE;
+ reportedUnappliedRotationDegrees = Format.NO_VALUE;
+ }
+
+ private void maybeNotifyVideoSizeChanged() {
+ if ((currentWidth != Format.NO_VALUE || currentHeight != Format.NO_VALUE)
+ && (reportedWidth != currentWidth || reportedHeight != currentHeight
+ || reportedUnappliedRotationDegrees != currentUnappliedRotationDegrees
+ || reportedPixelWidthHeightRatio != currentPixelWidthHeightRatio)) {
+ eventDispatcher.videoSizeChanged(currentWidth, currentHeight, currentUnappliedRotationDegrees,
+ currentPixelWidthHeightRatio);
+ reportedWidth = currentWidth;
+ reportedHeight = currentHeight;
+ reportedUnappliedRotationDegrees = currentUnappliedRotationDegrees;
+ reportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
+ }
+ }
+
+ private void maybeRenotifyVideoSizeChanged() {
+ if (reportedWidth != Format.NO_VALUE || reportedHeight != Format.NO_VALUE) {
+ eventDispatcher.videoSizeChanged(reportedWidth, reportedHeight,
+ reportedUnappliedRotationDegrees, reportedPixelWidthHeightRatio);
+ }
+ }
+
+ private void maybeNotifyDroppedFrames() {
+ if (droppedFrames > 0) {
+ long now = SystemClock.elapsedRealtime();
+ long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
+ eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
+ droppedFrames = 0;
+ droppedFrameAccumulationStartTimeMs = now;
+ }
+ }
+
+ private static boolean isBufferLate(long earlyUs) {
+ // Class a buffer as late if it should have been presented more than 30 ms ago.
+ return earlyUs < -30000;
+ }
+
+ private static boolean isBufferVeryLate(long earlyUs) {
+ // Class a buffer as very late if it should have been presented more than 500 ms ago.
+ return earlyUs < -500000;
+ }
+
+ @TargetApi(29)
+ private static void setHdr10PlusInfoV29(MediaCodec codec, byte[] hdr10PlusInfo) {
+ Bundle codecParameters = new Bundle();
+ codecParameters.putByteArray(MediaCodec.PARAMETER_KEY_HDR10_PLUS_INFO, hdr10PlusInfo);
+ codec.setParameters(codecParameters);
+ }
+
  /** Sets the codec's output surface without reinitialization. Requires API 23+. */
  @TargetApi(23)
  private static void setOutputSurfaceV23(MediaCodec codec, Surface surface) {
    codec.setOutputSurface(surface);
  }
+
  /**
   * Enables tunneled playback on the codec's configuration format and binds it to the given audio
   * session. Requires API 21+.
   */
  @TargetApi(21)
  private static void configureTunnelingV21(MediaFormat mediaFormat, int tunnelingAudioSessionId) {
    mediaFormat.setFeatureEnabled(CodecCapabilities.FEATURE_TunneledPlayback, true);
    mediaFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, tunnelingAudioSessionId);
  }
+
  /**
   * Returns the framework {@link MediaFormat} that should be used to configure the decoder.
   *
   * @param format The {@link Format} of media.
   * @param codecMimeType The MIME type handled by the codec.
   * @param codecMaxValues Codec max values that should be used when configuring the decoder.
   * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
   *     no codec operating rate should be set.
   * @param deviceNeedsNoPostProcessWorkaround Whether the device is known to do post processing by
   *     default that isn't compatible with ExoPlayer.
   * @param tunnelingAudioSessionId The audio session id to use for tunneling, or {@link
   *     C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
   * @return The framework {@link MediaFormat} that should be used to configure the decoder.
   */
  @SuppressLint("InlinedApi")
  protected MediaFormat getMediaFormat(
      Format format,
      String codecMimeType,
      CodecMaxValues codecMaxValues,
      float codecOperatingRate,
      boolean deviceNeedsNoPostProcessWorkaround,
      int tunnelingAudioSessionId) {
    MediaFormat mediaFormat = new MediaFormat();
    // Set format parameters that should always be set.
    mediaFormat.setString(MediaFormat.KEY_MIME, codecMimeType);
    mediaFormat.setInteger(MediaFormat.KEY_WIDTH, format.width);
    mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, format.height);
    MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
    // Set format parameters that may be unset.
    MediaFormatUtil.maybeSetFloat(mediaFormat, MediaFormat.KEY_FRAME_RATE, format.frameRate);
    MediaFormatUtil.maybeSetInteger(mediaFormat, MediaFormat.KEY_ROTATION, format.rotationDegrees);
    MediaFormatUtil.maybeSetColorInfo(mediaFormat, format.colorInfo);
    if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) {
      // Some phones require the profile to be set on the codec.
      // See https://github.com/google/ExoPlayer/pull/5438.
      Pair<Integer, Integer> codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
      if (codecProfileAndLevel != null) {
        MediaFormatUtil.maybeSetInteger(
            mediaFormat, MediaFormat.KEY_PROFILE, codecProfileAndLevel.first);
      }
    }
    // Set codec max values so the codec can adapt between formats without reinitialization.
    mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, codecMaxValues.width);
    mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, codecMaxValues.height);
    MediaFormatUtil.maybeSetInteger(
        mediaFormat, MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxValues.inputSize);
    // Set codec configuration values.
    if (Util.SDK_INT >= 23) {
      mediaFormat.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime priority */);
      if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET) {
        mediaFormat.setFloat(MediaFormat.KEY_OPERATING_RATE, codecOperatingRate);
      }
    }
    if (deviceNeedsNoPostProcessWorkaround) {
      // Vendor-specific keys that disable post processing and automatic frame rate conversion.
      mediaFormat.setInteger("no-post-process", 1);
      mediaFormat.setInteger("auto-frc", 0);
    }
    if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
      configureTunnelingV21(mediaFormat, tunnelingAudioSessionId);
    }
    return mediaFormat;
  }
+
  /**
   * Returns {@link CodecMaxValues} suitable for configuring a codec for {@code format} in a way
   * that will allow possible adaptation to other compatible formats in {@code streamFormats}.
   *
   * @param codecInfo Information about the {@link MediaCodec} being configured.
   * @param format The {@link Format} for which the codec is being configured.
   * @param streamFormats The possible stream formats.
   * @return Suitable {@link CodecMaxValues}.
   */
  protected CodecMaxValues getCodecMaxValues(
      MediaCodecInfo codecInfo, Format format, Format[] streamFormats) {
    int maxWidth = format.width;
    int maxHeight = format.height;
    int maxInputSize = getMaxInputSize(codecInfo, format);
    if (streamFormats.length == 1) {
      // The single entry in streamFormats must correspond to the format for which the codec is
      // being configured.
      if (maxInputSize != Format.NO_VALUE) {
        int codecMaxInputSize =
            getCodecMaxInputSize(codecInfo, format.sampleMimeType, format.width, format.height);
        if (codecMaxInputSize != Format.NO_VALUE) {
          // Scale up the initial video decoder maximum input size so playlist item transitions with
          // small increases in maximum sample size don't require reinitialization. This only makes
          // a difference if the exact maximum sample sizes are known from the container.
          int scaledMaxInputSize =
              (int) (maxInputSize * INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR);
          // Avoid exceeding the maximum expected for the codec.
          maxInputSize = Math.min(scaledMaxInputSize, codecMaxInputSize);
        }
      }
      return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
    }
    // Multiple stream formats: take the maximum over every format the codec can seamlessly adapt
    // to, tracking whether any of them has unknown dimensions.
    boolean haveUnknownDimensions = false;
    for (Format streamFormat : streamFormats) {
      if (codecInfo.isSeamlessAdaptationSupported(
          format, streamFormat, /* isNewFormatComplete= */ false)) {
        haveUnknownDimensions |=
            (streamFormat.width == Format.NO_VALUE || streamFormat.height == Format.NO_VALUE);
        maxWidth = Math.max(maxWidth, streamFormat.width);
        maxHeight = Math.max(maxHeight, streamFormat.height);
        maxInputSize = Math.max(maxInputSize, getMaxInputSize(codecInfo, streamFormat));
      }
    }
    if (haveUnknownDimensions) {
      // Fall back to a standard-size estimate matching the format's aspect ratio.
      Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight);
      Point codecMaxSize = getCodecMaxSize(codecInfo, format);
      if (codecMaxSize != null) {
        maxWidth = Math.max(maxWidth, codecMaxSize.x);
        maxHeight = Math.max(maxHeight, codecMaxSize.y);
        maxInputSize =
            Math.max(
                maxInputSize,
                getCodecMaxInputSize(codecInfo, format.sampleMimeType, maxWidth, maxHeight));
        Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight);
      }
    }
    return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
  }
+
  /** Creates a video-specific decoder exception that also captures the current output surface. */
  @Override
  protected DecoderException createDecoderException(
      Throwable cause, @Nullable MediaCodecInfo codecInfo) {
    return new VideoDecoderException(cause, codecInfo, surface);
  }
+
  /**
   * Returns a maximum video size to use when configuring a codec for {@code format} in a way that
   * will allow possible adaptation to other compatible formats that are expected to have the same
   * aspect ratio, but whose sizes are unknown.
   *
   * @param codecInfo Information about the {@link MediaCodec} being configured.
   * @param format The {@link Format} for which the codec is being configured.
   * @return The maximum video size to use, or null if the size of {@code format} should be used.
   */
  private static Point getCodecMaxSize(MediaCodecInfo codecInfo, Format format) {
    boolean isVerticalVideo = format.height > format.width;
    int formatLongEdgePx = isVerticalVideo ? format.height : format.width;
    int formatShortEdgePx = isVerticalVideo ? format.width : format.height;
    float aspectRatio = (float) formatShortEdgePx / formatLongEdgePx;
    // Walk candidate long-edge sizes (assumed descending in STANDARD_LONG_EDGE_VIDEO_PX) and
    // return the first one the codec supports at the format's frame rate.
    for (int longEdgePx : STANDARD_LONG_EDGE_VIDEO_PX) {
      int shortEdgePx = (int) (longEdgePx * aspectRatio);
      if (longEdgePx <= formatLongEdgePx || shortEdgePx <= formatShortEdgePx) {
        // Don't return a size not larger than the format for which the codec is being configured.
        return null;
      } else if (Util.SDK_INT >= 21) {
        Point alignedSize = codecInfo.alignVideoSizeV21(isVerticalVideo ? shortEdgePx : longEdgePx,
            isVerticalVideo ? longEdgePx : shortEdgePx);
        float frameRate = format.frameRate;
        if (codecInfo.isVideoSizeAndRateSupportedV21(alignedSize.x, alignedSize.y, frameRate)) {
          return alignedSize;
        }
      } else {
        try {
          // Conservatively assume the codec requires 16px width and height alignment.
          longEdgePx = Util.ceilDivide(longEdgePx, 16) * 16;
          shortEdgePx = Util.ceilDivide(shortEdgePx, 16) * 16;
          if (longEdgePx * shortEdgePx <= MediaCodecUtil.maxH264DecodableFrameSize()) {
            return new Point(
                isVerticalVideo ? shortEdgePx : longEdgePx,
                isVerticalVideo ? longEdgePx : shortEdgePx);
          }
        } catch (DecoderQueryException e) {
          // We tried our best. Give up!
          return null;
        }
      }
    }
    return null;
  }
+
+ /**
+ * Returns a maximum input buffer size for a given {@link MediaCodec} and {@link Format}.
+ *
+ * @param codecInfo Information about the {@link MediaCodec} being configured.
+ * @param format The format.
+ * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not
+ * be determined.
+ */
+ private static int getMaxInputSize(MediaCodecInfo codecInfo, Format format) {
+ if (format.maxInputSize != Format.NO_VALUE) {
+ // The format defines an explicit maximum input size. Add the total size of initialization
+ // data buffers, as they may need to be queued in the same input buffer as the largest sample.
+ int totalInitializationDataSize = 0;
+ int initializationDataCount = format.initializationData.size();
+ for (int i = 0; i < initializationDataCount; i++) {
+ totalInitializationDataSize += format.initializationData.get(i).length;
+ }
+ return format.maxInputSize + totalInitializationDataSize;
+ } else {
+ // Calculated maximum input sizes are overestimates, so it's not necessary to add the size of
+ // initialization data.
+ return getCodecMaxInputSize(codecInfo, format.sampleMimeType, format.width, format.height);
+ }
+ }
+
+ /**
+ * Returns a maximum input size for a given codec, MIME type, width and height.
+ *
+ * @param codecInfo Information about the {@link MediaCodec} being configured.
+ * @param sampleMimeType The format mime type.
+ * @param width The width in pixels.
+ * @param height The height in pixels.
+ * @return A maximum input size in bytes, or {@link Format#NO_VALUE} if a maximum could not be
+ * determined.
+ */
+ private static int getCodecMaxInputSize(
+ MediaCodecInfo codecInfo, String sampleMimeType, int width, int height) {
+ if (width == Format.NO_VALUE || height == Format.NO_VALUE) {
+ // We can't infer a maximum input size without video dimensions.
+ return Format.NO_VALUE;
+ }
+
+ // Attempt to infer a maximum input size from the format.
+ int maxPixels;
+ int minCompressionRatio;
+ switch (sampleMimeType) {
+ case MimeTypes.VIDEO_H263:
+ case MimeTypes.VIDEO_MP4V:
+ maxPixels = width * height;
+ minCompressionRatio = 2;
+ break;
+ case MimeTypes.VIDEO_H264:
+ if ("BRAVIA 4K 2015".equals(Util.MODEL) // Sony Bravia 4K
+ || ("Amazon".equals(Util.MANUFACTURER)
+ && ("KFSOWI".equals(Util.MODEL) // Kindle Soho
+ || ("AFTS".equals(Util.MODEL) && codecInfo.secure)))) { // Fire TV Gen 2
+ // Use the default value for cases where platform limitations may prevent buffers of the
+ // calculated maximum input size from being allocated.
+ return Format.NO_VALUE;
+ }
+ // Round up width/height to an integer number of macroblocks.
+ maxPixels = Util.ceilDivide(width, 16) * Util.ceilDivide(height, 16) * 16 * 16;
+ minCompressionRatio = 2;
+ break;
+ case MimeTypes.VIDEO_VP8:
+ // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp.
+ maxPixels = width * height;
+ minCompressionRatio = 2;
+ break;
+ case MimeTypes.VIDEO_H265:
+ case MimeTypes.VIDEO_VP9:
+ maxPixels = width * height;
+ minCompressionRatio = 4;
+ break;
+ default:
+ // Leave the default max input size.
+ return Format.NO_VALUE;
+ }
+ // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
+ return (maxPixels * 3) / (2 * minCompressionRatio);
+ }
+
  /**
   * Returns whether the device is known to do post processing by default that isn't compatible
   * with ExoPlayer.
   *
   * @return Whether the device is known to do post processing by default that isn't compatible
   *     with ExoPlayer.
   */
  private static boolean deviceNeedsNoPostProcessWorkaround() {
    // Nvidia devices prior to M try to adjust the playback rate to better map the frame-rate of
    // content to the refresh rate of the display. For example playback of 23.976fps content is
    // adjusted to play at 1.001x speed when the output display is 60Hz. Unfortunately the
    // implementation causes ExoPlayer's reported playback position to drift out of sync. Captions
    // also lose sync [Internal: b/26453592]. Even after M, the devices may apply post processing
    // operations that can modify frame output timestamps, which is incompatible with ExoPlayer's
    // logic for skipping decode-only frames.
    // The workaround is therefore applied on all NVIDIA devices regardless of API level.
    return "NVIDIA".equals(Util.MANUFACTURER);
  }
+
+ /*
+ * TODO:
+ *
+ * 1. Validate that Android device certification now ensures correct behavior, and add a
+ * corresponding SDK_INT upper bound for applying the workaround (probably SDK_INT < 26).
+ * 2. Determine a complete list of affected devices.
+ * 3. Some of the devices in this list only fail to support setOutputSurface when switching from
+ * a SurfaceView provided Surface to a Surface of another type (e.g. TextureView/DummySurface),
+ * and vice versa. One hypothesis is that setOutputSurface fails when the surfaces have
+ * different pixel formats. If we can find a way to query the Surface instances to determine
+ * whether this case applies, then we'll be able to provide a more targeted workaround.
+ */
+ /**
+ * Returns whether the codec is known to implement {@link MediaCodec#setOutputSurface(Surface)}
+ * incorrectly.
+ *
+ * <p>If true is returned then we fall back to releasing and re-instantiating the codec instead.
+ *
+ * @param name The name of the codec.
+ * @return True if the device is known to implement {@link MediaCodec#setOutputSurface(Surface)}
+ * incorrectly.
+ */
+  protected boolean codecNeedsSetOutputSurfaceWorkaround(String name) {
+    if (name.startsWith("OMX.google")) {
+      // Google OMX decoders are not known to have this issue on any API level.
+      return false;
+    }
+    // The answer depends only on the device, so it's evaluated lazily once per process and cached
+    // in static fields. The class lock guards the lazy evaluation against concurrent renderer
+    // instances.
+    synchronized (MediaCodecVideoRenderer.class) {
+      if (!evaluatedDeviceNeedsSetOutputSurfaceWorkaround) {
+        if ("dangal".equals(Util.DEVICE)) {
+          // Workaround for MiTV devices:
+          // https://github.com/google/ExoPlayer/issues/5169,
+          // https://github.com/google/ExoPlayer/issues/6899.
+          deviceNeedsSetOutputSurfaceWorkaround = true;
+        } else if (Util.SDK_INT <= 27 && "HWEML".equals(Util.DEVICE)) {
+          // Workaround for Huawei P20:
+          // https://github.com/google/ExoPlayer/issues/4468#issuecomment-459291645.
+          deviceNeedsSetOutputSurfaceWorkaround = true;
+        } else if (Util.SDK_INT >= 27) {
+          // In general, devices running API level 27 or later should be unaffected. Do nothing.
+        } else {
+          // Enable the workaround on a per-device basis. Works around:
+          // https://github.com/google/ExoPlayer/issues/3236,
+          // https://github.com/google/ExoPlayer/issues/3355,
+          // https://github.com/google/ExoPlayer/issues/3439,
+          // https://github.com/google/ExoPlayer/issues/3724,
+          // https://github.com/google/ExoPlayer/issues/3835,
+          // https://github.com/google/ExoPlayer/issues/4006,
+          // https://github.com/google/ExoPlayer/issues/4084,
+          // https://github.com/google/ExoPlayer/issues/4104,
+          // https://github.com/google/ExoPlayer/issues/4134,
+          // https://github.com/google/ExoPlayer/issues/4315,
+          // https://github.com/google/ExoPlayer/issues/4419,
+          // https://github.com/google/ExoPlayer/issues/4460,
+          // https://github.com/google/ExoPlayer/issues/4468,
+          // https://github.com/google/ExoPlayer/issues/5312,
+          // https://github.com/google/ExoPlayer/issues/6503.
+          switch (Util.DEVICE) {
+            case "1601":
+            case "1713":
+            case "1714":
+            case "A10-70F":
+            case "A10-70L":
+            case "A1601":
+            case "A2016a40":
+            case "A7000-a":
+            case "A7000plus":
+            case "A7010a48":
+            case "A7020a48":
+            case "AquaPowerM":
+            case "ASUS_X00AD_2":
+            case "Aura_Note_2":
+            case "BLACK-1X":
+            case "BRAVIA_ATV2":
+            case "BRAVIA_ATV3_4K":
+            case "C1":
+            case "ComioS1":
+            case "CP8676_I02":
+            case "CPH1609":
+            case "CPY83_I00":
+            case "cv1":
+            case "cv3":
+            case "deb":
+            case "E5643":
+            case "ELUGA_A3_Pro":
+            case "ELUGA_Note":
+            case "ELUGA_Prim":
+            case "ELUGA_Ray_X":
+            case "EverStar_S":
+            case "F3111":
+            case "F3113":
+            case "F3116":
+            case "F3211":
+            case "F3213":
+            case "F3215":
+            case "F3311":
+            case "flo":
+            case "fugu":
+            case "GiONEE_CBL7513":
+            case "GiONEE_GBL7319":
+            case "GIONEE_GBL7360":
+            case "GIONEE_SWW1609":
+            case "GIONEE_SWW1627":
+            case "GIONEE_SWW1631":
+            case "GIONEE_WBL5708":
+            case "GIONEE_WBL7365":
+            case "GIONEE_WBL7519":
+            case "griffin":
+            case "htc_e56ml_dtul":
+            case "hwALE-H":
+            case "HWBLN-H":
+            case "HWCAM-H":
+            case "HWVNS-H":
+            case "HWWAS-H":
+            case "i9031":
+            case "iball8735_9806":
+            case "Infinix-X572":
+            case "iris60":
+            case "itel_S41":
+            case "j2xlteins":
+            case "JGZ":
+            case "K50a40":
+            case "kate":
+            case "l5460":
+            case "le_x6":
+            case "LS-5017":
+            case "M5c":
+            case "manning":
+            case "marino_f":
+            case "MEIZU_M5":
+            case "mh":
+            case "mido":
+            case "MX6":
+            case "namath":
+            case "nicklaus_f":
+            case "NX541J":
+            case "NX573J":
+            case "OnePlus5T":
+            case "p212":
+            case "P681":
+            case "P85":
+            case "panell_d":
+            case "panell_dl":
+            case "panell_ds":
+            case "panell_dt":
+            case "PB2-670M":
+            case "PGN528":
+            case "PGN610":
+            case "PGN611":
+            case "Phantom6":
+            case "Pixi4-7_3G":
+            case "Pixi5-10_4G":
+            case "PLE":
+            case "PRO7S":
+            case "Q350":
+            case "Q4260":
+            case "Q427":
+            case "Q4310":
+            case "Q5":
+            case "QM16XE_U":
+            case "QX1":
+            case "santoni":
+            case "Slate_Pro":
+            case "SVP-DTV15":
+            case "s905x018":
+            case "taido_row":
+            case "TB3-730F":
+            case "TB3-730X":
+            case "TB3-850F":
+            case "TB3-850M":
+            case "tcl_eu":
+            case "V1":
+            case "V23GB":
+            case "V5":
+            case "vernee_M5":
+            case "watson":
+            case "whyred":
+            case "woods_f":
+            case "woods_fn":
+            case "X3_HK":
+            case "XE2X":
+            case "XT1663":
+            case "Z12_PRO":
+            case "Z80":
+              deviceNeedsSetOutputSurfaceWorkaround = true;
+              break;
+            default:
+              // Do nothing.
+              break;
+          }
+          // Further affected devices, identified by their model name rather than device name.
+          switch (Util.MODEL) {
+            case "AFTA":
+            case "AFTN":
+            case "JSN-L21":
+              deviceNeedsSetOutputSurfaceWorkaround = true;
+              break;
+            default:
+              // Do nothing.
+              break;
+          }
+        }
+        evaluatedDeviceNeedsSetOutputSurfaceWorkaround = true;
+      }
+    }
+    return deviceNeedsSetOutputSurfaceWorkaround;
+  }
+
+  /** Returns the output {@link Surface}, which may be {@code null} if none is currently set. */
+  protected Surface getSurface() {
+    return surface;
+  }
+
+  /**
+   * Immutable holder for the maximum values a codec may be configured with: the maximum video
+   * width, the maximum video height, and the maximum input buffer size.
+   */
+  protected static final class CodecMaxValues {
+
+    /** The maximum video width. */
+    public final int width;
+    /** The maximum video height. */
+    public final int height;
+    /** The maximum input buffer size. */
+    public final int inputSize;
+
+    public CodecMaxValues(int width, int height, int inputSize) {
+      this.width = width;
+      this.height = height;
+      this.inputSize = inputSize;
+    }
+
+  }
+
+  /**
+   * Listens for frame-rendered callbacks from a {@link MediaCodec} and forwards them to the
+   * renderer's tunneling handlers, working around a platform deadlock on API levels below 30 by
+   * re-dispatching the callback through a {@link Handler}.
+   */
+  @TargetApi(23)
+  private final class OnFrameRenderedListenerV23
+      implements MediaCodec.OnFrameRenderedListener, Handler.Callback {
+
+    private static final int HANDLE_FRAME_RENDERED = 0;
+
+    private final Handler handler;
+
+    public OnFrameRenderedListenerV23(MediaCodec codec) {
+      // Handler associated with the current thread's looper; queued messages are handled there.
+      handler = new Handler(this);
+      codec.setOnFrameRenderedListener(/* listener= */ this, handler);
+    }
+
+    @Override
+    public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
+      // Workaround bug in MediaCodec that causes deadlock if you call directly back into the
+      // MediaCodec from this listener method.
+      // Deadlock occurs because MediaCodec calls this listener method holding a lock,
+      // which may also be required by calls made back into the MediaCodec.
+      // This was fixed in https://android-review.googlesource.com/1156807.
+      //
+      // The workaround queues the event for subsequent processing, where the lock will not be held.
+      if (Util.SDK_INT < 30) {
+        // Pack the 64-bit timestamp into the Message's two 32-bit int fields; it's reassembled
+        // with Util.toLong in handleMessage.
+        Message message =
+            Message.obtain(
+                handler,
+                /* what= */ HANDLE_FRAME_RENDERED,
+                /* arg1= */ (int) (presentationTimeUs >> 32),
+                /* arg2= */ (int) presentationTimeUs);
+        handler.sendMessageAtFrontOfQueue(message);
+      } else {
+        handleFrameRendered(presentationTimeUs);
+      }
+    }
+
+    @Override
+    public boolean handleMessage(Message message) {
+      switch (message.what) {
+        case HANDLE_FRAME_RENDERED:
+          handleFrameRendered(Util.toLong(message.arg1, message.arg2));
+          return true;
+        default:
+          return false;
+      }
+    }
+
+    private void handleFrameRendered(long presentationTimeUs) {
+      if (this != tunnelingOnFrameRenderedListener) {
+        // Stale event.
+        return;
+      }
+      if (presentationTimeUs == TUNNELING_EOS_PRESENTATION_TIME_US) {
+        onProcessedTunneledEndOfStream();
+      } else {
+        onProcessedTunneledBuffer(presentationTimeUs);
+      }
+    }
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java
new file mode 100644
index 0000000000..fbcd4d959c
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/SimpleDecoderVideoRenderer.java
@@ -0,0 +1,975 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.os.Handler;
+import android.os.SystemClock;
+import android.view.Surface;
+import androidx.annotation.CallSuper;
+import androidx.annotation.IntDef;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.BaseRenderer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.FormatHolder;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.RendererCapabilities;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCounters;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.SimpleDecoder;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSession.DrmSessionException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.DrmSessionManager;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.drm.ExoMediaCrypto;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.TimedValueQueue;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.TraceUtil;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher;
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/** Decodes and renders video using a {@link SimpleDecoder}. */
+public abstract class SimpleDecoderVideoRenderer extends BaseRenderer {
+
+ /** Decoder reinitialization states. */
+ @Documented
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({
+ REINITIALIZATION_STATE_NONE,
+ REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM,
+ REINITIALIZATION_STATE_WAIT_END_OF_STREAM
+ })
+ private @interface ReinitializationState {}
+ /** The decoder does not need to be re-initialized. */
+ private static final int REINITIALIZATION_STATE_NONE = 0;
+ /**
+ * The input format has changed in a way that requires the decoder to be re-initialized, but we
+ * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to
+ * ensure that it outputs any remaining buffers before we release it.
+ */
+ private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1;
+ /**
+ * The input format has changed in a way that requires the decoder to be re-initialized, and we've
+ * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an
+ * end of stream signal to indicate that it has output any remaining buffers before we release it.
+ */
+ private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2;
+
+ private final long allowedJoiningTimeMs;
+ private final int maxDroppedFramesToNotify;
+ private final boolean playClearSamplesWithoutKeys;
+ private final EventDispatcher eventDispatcher;
+ private final TimedValueQueue<Format> formatQueue;
+ private final DecoderInputBuffer flagsOnlyBuffer;
+ private final DrmSessionManager<ExoMediaCrypto> drmSessionManager;
+
+ private boolean drmResourcesAcquired;
+ private Format inputFormat;
+ private Format outputFormat;
+ private SimpleDecoder<
+ VideoDecoderInputBuffer,
+ ? extends VideoDecoderOutputBuffer,
+ ? extends VideoDecoderException>
+ decoder;
+ private VideoDecoderInputBuffer inputBuffer;
+ private VideoDecoderOutputBuffer outputBuffer;
+ @Nullable private Surface surface;
+ @Nullable private VideoDecoderOutputBufferRenderer outputBufferRenderer;
+ @C.VideoOutputMode private int outputMode;
+
+ @Nullable private DrmSession<ExoMediaCrypto> decoderDrmSession;
+ @Nullable private DrmSession<ExoMediaCrypto> sourceDrmSession;
+
+ @ReinitializationState private int decoderReinitializationState;
+ private boolean decoderReceivedBuffers;
+
+ private boolean renderedFirstFrame;
+ private long initialPositionUs;
+ private long joiningDeadlineMs;
+ private boolean waitingForKeys;
+ private boolean waitingForFirstSampleInFormat;
+
+ private boolean inputStreamEnded;
+ private boolean outputStreamEnded;
+ private int reportedWidth;
+ private int reportedHeight;
+
+ private long droppedFrameAccumulationStartTimeMs;
+ private int droppedFrames;
+ private int consecutiveDroppedFrameCount;
+ private int buffersInCodecCount;
+ private long lastRenderTimeUs;
+ private long outputStreamOffsetUs;
+
+ /** Decoder event counters used for debugging purposes. */
+ protected DecoderCounters decoderCounters;
+
+ /**
+ * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
+ * can attempt to seamlessly join an ongoing playback.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}.
+ * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
+ * media is not required.
+ * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
+ * For example a media file may start with a short clear region so as to allow playback to
+ * begin in parallel with key acquisition. This parameter specifies whether the renderer is
+ * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
+ * has obtained the keys necessary to decrypt encrypted regions of the media.
+ */
+  protected SimpleDecoderVideoRenderer(
+      long allowedJoiningTimeMs,
+      @Nullable Handler eventHandler,
+      @Nullable VideoRendererEventListener eventListener,
+      int maxDroppedFramesToNotify,
+      @Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager,
+      boolean playClearSamplesWithoutKeys) {
+    super(C.TRACK_TYPE_VIDEO);
+    this.allowedJoiningTimeMs = allowedJoiningTimeMs;
+    this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
+    this.drmSessionManager = drmSessionManager;
+    this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
+    // Not joining until onPositionReset(..., /* joining= */ true) sets a deadline.
+    joiningDeadlineMs = C.TIME_UNSET;
+    clearReportedVideoSize();
+    formatQueue = new TimedValueQueue<>();
+    // Flags-only buffer used by render() to poll for a format / end of stream before a real
+    // input buffer is available.
+    flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
+    eventDispatcher = new EventDispatcher(eventHandler, eventListener);
+    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
+    outputMode = C.VIDEO_OUTPUT_MODE_NONE;
+  }
+
+  // BaseRenderer implementation.
+
+  /** Delegates to {@link #supportsFormatInternal}, passing this renderer's DrmSessionManager. */
+  @Override
+  @Capabilities
+  public final int supportsFormat(Format format) {
+    return supportsFormatInternal(drmSessionManager, format);
+  }
+
+  @Override
+  public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
+    if (outputStreamEnded) {
+      // Nothing more to do once the output stream has ended.
+      return;
+    }
+
+    if (inputFormat == null) {
+      // We don't have a format yet, so try and read one.
+      FormatHolder formatHolder = getFormatHolder();
+      flagsOnlyBuffer.clear();
+      int result = readSource(formatHolder, flagsOnlyBuffer, true);
+      if (result == C.RESULT_FORMAT_READ) {
+        onInputFormatChanged(formatHolder);
+      } else if (result == C.RESULT_BUFFER_READ) {
+        // End of stream read having not read a format.
+        Assertions.checkState(flagsOnlyBuffer.isEndOfStream());
+        inputStreamEnded = true;
+        outputStreamEnded = true;
+        return;
+      } else {
+        // We still don't have a format and can't make progress without one.
+        return;
+      }
+    }
+
+    // If we don't have a decoder yet, we need to instantiate one.
+    maybeInitDecoder();
+
+    if (decoder != null) {
+      try {
+        // Rendering loop: drain as many output buffers as possible, then queue as many input
+        // buffers as the decoder will accept.
+        TraceUtil.beginSection("drainAndFeed");
+        while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {}
+        while (feedInputBuffer()) {}
+        TraceUtil.endSection();
+      } catch (VideoDecoderException e) {
+        throw createRendererException(e, inputFormat);
+      }
+      decoderCounters.ensureUpdated();
+    }
+  }
+
+  /** Returns whether the renderer has processed the last output buffer of the stream. */
+  @Override
+  public boolean isEnded() {
+    return outputStreamEnded;
+  }
+
+  @Override
+  public boolean isReady() {
+    if (waitingForKeys) {
+      // Not ready while blocked waiting for DRM keys.
+      return false;
+    }
+    if (inputFormat != null
+        && (isSourceReady() || outputBuffer != null)
+        && (renderedFirstFrame || !hasOutput())) {
+      // Ready. If we were joining then we've now joined, so clear the joining deadline.
+      joiningDeadlineMs = C.TIME_UNSET;
+      return true;
+    } else if (joiningDeadlineMs == C.TIME_UNSET) {
+      // Not joining.
+      return false;
+    } else if (SystemClock.elapsedRealtime() < joiningDeadlineMs) {
+      // Joining and still within the joining deadline.
+      return true;
+    } else {
+      // The joining deadline has been exceeded. Give up and clear the deadline.
+      joiningDeadlineMs = C.TIME_UNSET;
+      return false;
+    }
+  }
+
+ // Protected methods.
+
+  @Override
+  protected void onEnabled(boolean joining) throws ExoPlaybackException {
+    if (drmSessionManager != null && !drmResourcesAcquired) {
+      // Acquire DRM resources once; the matching release is in onReset().
+      drmResourcesAcquired = true;
+      drmSessionManager.prepare();
+    }
+    decoderCounters = new DecoderCounters();
+    eventDispatcher.enabled(decoderCounters);
+  }
+
+  @Override
+  protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
+    inputStreamEnded = false;
+    outputStreamEnded = false;
+    clearRenderedFirstFrame();
+    initialPositionUs = C.TIME_UNSET;
+    consecutiveDroppedFrameCount = 0;
+    if (decoder != null) {
+      // Discard any buffers queued before the position reset.
+      flushDecoder();
+    }
+    if (joining) {
+      setJoiningDeadlineMs();
+    } else {
+      joiningDeadlineMs = C.TIME_UNSET;
+    }
+    formatQueue.clear();
+  }
+
+  @Override
+  protected void onStarted() {
+    // Start a fresh dropped-frame accounting window.
+    droppedFrames = 0;
+    droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
+    // Stored in microseconds (elapsedRealtime() is in milliseconds).
+    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
+  }
+
+  @Override
+  protected void onStopped() {
+    joiningDeadlineMs = C.TIME_UNSET;
+    // Flush any dropped-frame count accumulated since the last notification.
+    maybeNotifyDroppedFrames();
+  }
+
+  @Override
+  protected void onDisabled() {
+    inputFormat = null;
+    waitingForKeys = false;
+    clearReportedVideoSize();
+    clearRenderedFirstFrame();
+    try {
+      setSourceDrmSession(null);
+      releaseDecoder();
+    } finally {
+      // Always notify the dispatcher, even if releasing the decoder throws.
+      eventDispatcher.disabled(decoderCounters);
+    }
+  }
+
+  @Override
+  protected void onReset() {
+    if (drmSessionManager != null && drmResourcesAcquired) {
+      // Counterpart to the prepare() call in onEnabled().
+      drmResourcesAcquired = false;
+      drmSessionManager.release();
+    }
+  }
+
+  @Override
+  protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
+    // Remember the offset between stream and renderer timestamps for this sample stream.
+    outputStreamOffsetUs = offsetUs;
+    super.onStreamChanged(formats, offsetUs);
+  }
+
+ /**
+ * Called when a decoder has been created and configured.
+ *
+ * <p>The default implementation is a no-op.
+ *
+ * @param name The name of the decoder that was initialized.
+ * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
+ * finished.
+ * @param initializationDurationMs The time taken to initialize the decoder, in milliseconds.
+ */
+  @CallSuper
+  protected void onDecoderInitialized(
+      String name, long initializedTimestampMs, long initializationDurationMs) {
+    eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs);
+  }
+
+  /**
+   * Flushes the decoder.
+   *
+   * @throws ExoPlaybackException If an error occurs reinitializing a decoder.
+   */
+  @CallSuper
+  protected void flushDecoder() throws ExoPlaybackException {
+    waitingForKeys = false;
+    buffersInCodecCount = 0;
+    if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) {
+      // A re-initialization is pending anyway, so replace the decoder rather than flushing it.
+      releaseDecoder();
+      maybeInitDecoder();
+    } else {
+      inputBuffer = null;
+      if (outputBuffer != null) {
+        outputBuffer.release();
+        outputBuffer = null;
+      }
+      decoder.flush();
+      decoderReceivedBuffers = false;
+    }
+  }
+
+  /** Releases the decoder. */
+  @CallSuper
+  protected void releaseDecoder() {
+    inputBuffer = null;
+    outputBuffer = null;
+    decoderReinitializationState = REINITIALIZATION_STATE_NONE;
+    decoderReceivedBuffers = false;
+    buffersInCodecCount = 0;
+    if (decoder != null) {
+      decoder.release();
+      decoder = null;
+      decoderCounters.decoderReleaseCount++;
+    }
+    setDecoderDrmSession(null);
+  }
+
+ /**
+ * Called when a new format is read from the upstream source.
+ *
+ * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}.
+ * @throws ExoPlaybackException If an error occurs (re-)initializing the decoder.
+ */
+  @CallSuper
+  @SuppressWarnings("unchecked")
+  protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
+    waitingForFirstSampleInFormat = true;
+    Format newFormat = Assertions.checkNotNull(formatHolder.format);
+    if (formatHolder.includesDrmSession) {
+      setSourceDrmSession((DrmSession<ExoMediaCrypto>) formatHolder.drmSession);
+    } else {
+      sourceDrmSession =
+          getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession);
+    }
+    inputFormat = newFormat;
+
+    if (sourceDrmSession != decoderDrmSession) {
+      // The DRM session has changed, so the decoder has to be re-initialized.
+      if (decoderReceivedBuffers) {
+        // Signal end of stream and wait for any final output buffers before re-initialization.
+        decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
+      } else {
+        // There aren't any final output buffers, so release the decoder immediately.
+        releaseDecoder();
+        maybeInitDecoder();
+      }
+    }
+
+    eventDispatcher.inputFormatChanged(inputFormat);
+  }
+
+ /**
+ * Called immediately before an input buffer is queued into the decoder.
+ *
+ * <p>The default implementation is a no-op.
+ *
+ * @param buffer The buffer that will be queued.
+ */
+  protected void onQueueInputBuffer(VideoDecoderInputBuffer buffer) {
+    // Do nothing.
+  }
+
+  /**
+   * Called when an output buffer is successfully processed.
+   *
+   * @param presentationTimeUs The timestamp associated with the output buffer.
+   */
+  @CallSuper
+  protected void onProcessedOutputBuffer(long presentationTimeUs) {
+    // One fewer buffer is now in flight inside the decoder.
+    buffersInCodecCount--;
+  }
+
+  /**
+   * Returns whether the buffer being processed should be dropped.
+   *
+   * @param earlyUs The time until the buffer should be presented in microseconds. A negative value
+   *     indicates that the buffer is late.
+   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+   *     measured at the start of the current iteration of the rendering loop.
+   */
+  protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) {
+    return isBufferLate(earlyUs);
+  }
+
+  /**
+   * Returns whether to drop all buffers from the buffer being processed to the keyframe at or after
+   * the current playback position, if possible.
+   *
+   * @param earlyUs The time until the current buffer should be presented in microseconds. A
+   *     negative value indicates that the buffer is late.
+   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+   *     measured at the start of the current iteration of the rendering loop.
+   */
+  protected boolean shouldDropBuffersToKeyframe(long earlyUs, long elapsedRealtimeUs) {
+    return isBufferVeryLate(earlyUs);
+  }
+
+  /**
+   * Returns whether to force rendering an output buffer.
+   *
+   * @param earlyUs The time until the current buffer should be presented in microseconds. A
+   *     negative value indicates that the buffer is late.
+   * @param elapsedSinceLastRenderUs The elapsed time since the last output buffer was rendered, in
+   *     microseconds.
+   * @return Returns whether to force rendering an output buffer.
+   */
+  protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceLastRenderUs) {
+    // Force rendering a late buffer if nothing has been rendered for over 100ms.
+    return isBufferLate(earlyUs) && elapsedSinceLastRenderUs > 100000;
+  }
+
+  /**
+   * Skips the specified output buffer and releases it.
+   *
+   * @param outputBuffer The output buffer to skip.
+   */
+  protected void skipOutputBuffer(VideoDecoderOutputBuffer outputBuffer) {
+    decoderCounters.skippedOutputBufferCount++;
+    outputBuffer.release();
+  }
+
+  /**
+   * Drops the specified output buffer and releases it.
+   *
+   * @param outputBuffer The output buffer to drop.
+   */
+  protected void dropOutputBuffer(VideoDecoderOutputBuffer outputBuffer) {
+    updateDroppedBufferCounters(1);
+    outputBuffer.release();
+  }
+
+  /**
+   * Drops frames from the current output buffer to the next keyframe at or before the playback
+   * position. If no such keyframe exists, as the playback position is inside the same group of
+   * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise.
+   *
+   * @param positionUs The current playback position, in microseconds.
+   * @return Whether any buffers were dropped.
+   * @throws ExoPlaybackException If an error occurs flushing the decoder.
+   */
+  protected boolean maybeDropBuffersToKeyframe(long positionUs) throws ExoPlaybackException {
+    // skipSource advances the sample stream to the keyframe at or before positionUs, returning
+    // the number of source buffers skipped in the process.
+    int droppedSourceBufferCount = skipSource(positionUs);
+    if (droppedSourceBufferCount == 0) {
+      return false;
+    }
+    decoderCounters.droppedToKeyframeCount++;
+    // We dropped some buffers to catch up, so update the decoder counters and flush the decoder,
+    // which releases all pending buffers including the current output buffer.
+    updateDroppedBufferCounters(buffersInCodecCount + droppedSourceBufferCount);
+    flushDecoder();
+    return true;
+  }
+
+  /**
+   * Updates decoder counters to reflect that {@code droppedBufferCount} additional buffers were
+   * dropped.
+   *
+   * @param droppedBufferCount The number of additional dropped buffers.
+   */
+  protected void updateDroppedBufferCounters(int droppedBufferCount) {
+    decoderCounters.droppedBufferCount += droppedBufferCount;
+    droppedFrames += droppedBufferCount;
+    consecutiveDroppedFrameCount += droppedBufferCount;
+    decoderCounters.maxConsecutiveDroppedBufferCount =
+        Math.max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedBufferCount);
+    // Notify eagerly once the configured threshold is reached (0 disables the threshold).
+    if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) {
+      maybeNotifyDroppedFrames();
+    }
+  }
+
+ /**
+ * Returns the {@link Capabilities} for the given {@link Format}.
+ *
+ * @param drmSessionManager The renderer's {@link DrmSessionManager}.
+ * @param format The format, which has a video {@link Format#sampleMimeType}.
+ * @return The {@link Capabilities} for this {@link Format}.
+ * @see RendererCapabilities#supportsFormat(Format)
+ */
+  // Subclass hooks, implemented for the specific SimpleDecoder in use.
+  @Capabilities
+  protected abstract int supportsFormatInternal(
+      @Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format);
+
+  /**
+   * Creates a decoder for the given format.
+   *
+   * @param format The format for which a decoder is required.
+   * @param mediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted content.
+   *     May be null and can be ignored if decoder does not handle encrypted content.
+   * @return The decoder.
+   * @throws VideoDecoderException If an error occurred creating a suitable decoder.
+   */
+  protected abstract SimpleDecoder<
+          VideoDecoderInputBuffer,
+          ? extends VideoDecoderOutputBuffer,
+          ? extends VideoDecoderException>
+      createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto)
+          throws VideoDecoderException;
+
+ /**
+ * Renders the specified output buffer.
+ *
+ * <p>The implementation of this method takes ownership of the output buffer and is responsible
+ * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future.
+ *
+ * @param outputBuffer {@link VideoDecoderOutputBuffer} to render.
+ * @param presentationTimeUs Presentation time in microseconds.
+ * @param outputFormat Output {@link Format}.
+ * @throws VideoDecoderException If an error occurs when rendering the output buffer.
+ */
+  protected void renderOutputBuffer(
+      VideoDecoderOutputBuffer outputBuffer, long presentationTimeUs, Format outputFormat)
+      throws VideoDecoderException {
+    // Record the render time in microseconds, matching the unit used by onStarted(), which also
+    // stores SystemClock.elapsedRealtime() * 1000 into lastRenderTimeUs. The previous expression
+    // additionally wrapped this in C.msToUs(...), multiplying by a further factor of 1000 and
+    // making the stored value inconsistent with the field's microsecond convention (which e.g.
+    // shouldForceRenderOutputBuffer's 100000us threshold relies on).
+    lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
+    int bufferMode = outputBuffer.mode;
+    // A buffer can only be rendered if an output matching its mode is attached.
+    boolean renderSurface = bufferMode == C.VIDEO_OUTPUT_MODE_SURFACE_YUV && surface != null;
+    boolean renderYuv = bufferMode == C.VIDEO_OUTPUT_MODE_YUV && outputBufferRenderer != null;
+    if (!renderYuv && !renderSurface) {
+      // No usable output for this buffer's mode; count the buffer as dropped and release it.
+      dropOutputBuffer(outputBuffer);
+    } else {
+      maybeNotifyVideoSizeChanged(outputBuffer.width, outputBuffer.height);
+      if (renderYuv) {
+        outputBufferRenderer.setOutputBuffer(outputBuffer);
+      } else {
+        renderOutputBufferToSurface(outputBuffer, surface);
+      }
+      consecutiveDroppedFrameCount = 0;
+      decoderCounters.renderedOutputBufferCount++;
+      maybeNotifyRenderedFirstFrame();
+    }
+  }
+
+ /**
+ * Renders the specified output buffer to the passed surface.
+ *
+ * <p>The implementation of this method takes ownership of the output buffer and is responsible
+ * for calling {@link VideoDecoderOutputBuffer#release()} either immediately or in the future.
+ *
+ * @param outputBuffer {@link VideoDecoderOutputBuffer} to render.
+ * @param surface Output {@link Surface}.
+ * @throws VideoDecoderException If an error occurs when rendering the output buffer.
+ */
+ protected abstract void renderOutputBufferToSurface(
+ VideoDecoderOutputBuffer outputBuffer, Surface surface) throws VideoDecoderException;
+
+ /**
+ * Sets output surface.
+ *
+ * @param surface Surface.
+ */
+  protected final void setOutputSurface(@Nullable Surface surface) {
+    if (this.surface == surface) {
+      // The output is unchanged; if it's non-null, signal a reset of the existing output.
+      if (surface != null) {
+        onOutputReset();
+      }
+      return;
+    }
+    this.surface = surface;
+    if (surface == null) {
+      // The output has been removed. We leave the outputMode of the underlying decoder unchanged
+      // in anticipation that a subsequent output will likely be of the same type.
+      outputMode = C.VIDEO_OUTPUT_MODE_NONE;
+      onOutputRemoved();
+      return;
+    }
+    // A new surface output takes precedence over any buffer renderer previously set.
+    outputBufferRenderer = null;
+    outputMode = C.VIDEO_OUTPUT_MODE_SURFACE_YUV;
+    if (decoder != null) {
+      setDecoderOutputMode(outputMode);
+    }
+    onOutputChanged();
+  }
+
+ /**
+ * Sets output buffer renderer.
+ *
+ * @param outputBufferRenderer Output buffer renderer.
+ */
+  protected final void setOutputBufferRenderer(
+      @Nullable VideoDecoderOutputBufferRenderer outputBufferRenderer) {
+    if (this.outputBufferRenderer == outputBufferRenderer) {
+      // The output is unchanged; if it's non-null, signal a reset of the existing output.
+      if (outputBufferRenderer != null) {
+        onOutputReset();
+      }
+      return;
+    }
+    this.outputBufferRenderer = outputBufferRenderer;
+    if (outputBufferRenderer == null) {
+      // The output has been removed. We leave the outputMode of the underlying decoder unchanged
+      // in anticipation that a subsequent output will likely be of the same type.
+      outputMode = C.VIDEO_OUTPUT_MODE_NONE;
+      onOutputRemoved();
+      return;
+    }
+    // A new buffer renderer output takes precedence over any surface previously set.
+    surface = null;
+    outputMode = C.VIDEO_OUTPUT_MODE_YUV;
+    if (decoder != null) {
+      setDecoderOutputMode(outputMode);
+    }
+    onOutputChanged();
+  }
+
+ /**
+ * Sets output mode of the decoder.
+ *
+ * @param outputMode Output mode.
+ */
+ protected abstract void setDecoderOutputMode(@C.VideoOutputMode int outputMode);
+
+ // Internal methods.
+
+  private void setSourceDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
+    // NOTE(review): DrmSession.replaceSession presumably acquires the new session and releases the
+    // old one — confirm against DrmSession's documentation.
+    DrmSession.replaceSession(sourceDrmSession, session);
+    sourceDrmSession = session;
+  }
+
+  private void setDecoderDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
+    DrmSession.replaceSession(decoderDrmSession, session);
+    decoderDrmSession = session;
+  }
+
+  private void maybeInitDecoder() throws ExoPlaybackException {
+    if (decoder != null) {
+      // Decoder already instantiated.
+      return;
+    }
+
+    setDecoderDrmSession(sourceDrmSession);
+
+    ExoMediaCrypto mediaCrypto = null;
+    if (decoderDrmSession != null) {
+      mediaCrypto = decoderDrmSession.getMediaCrypto();
+      if (mediaCrypto == null) {
+        DrmSessionException drmError = decoderDrmSession.getError();
+        if (drmError != null) {
+          // Continue for now. We may be able to avoid failure if the session recovers, or if a new
+          // input format causes the session to be replaced before it's used.
+        } else {
+          // The drm session isn't open yet.
+          return;
+        }
+      }
+    }
+
+    try {
+      // Time the decoder construction so it can be reported via onDecoderInitialized.
+      long decoderInitializingTimestamp = SystemClock.elapsedRealtime();
+      decoder = createDecoder(inputFormat, mediaCrypto);
+      setDecoderOutputMode(outputMode);
+      long decoderInitializedTimestamp = SystemClock.elapsedRealtime();
+      onDecoderInitialized(
+          decoder.getName(),
+          decoderInitializedTimestamp,
+          decoderInitializedTimestamp - decoderInitializingTimestamp);
+      decoderCounters.decoderInitCount++;
+    } catch (VideoDecoderException e) {
+      throw createRendererException(e, inputFormat);
+    }
+  }
+
+  /**
+   * Attempts to read a sample from the source and queue it to the decoder.
+   *
+   * @return Whether it may be possible to feed more input data.
+   * @throws VideoDecoderException If the decoder reports an error queueing the buffer.
+   * @throws ExoPlaybackException If the DRM session is in an error state.
+   */
+  private boolean feedInputBuffer() throws VideoDecoderException, ExoPlaybackException {
+    if (decoder == null
+        || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM
+        || inputStreamEnded) {
+      // We need to reinitialize the decoder or the input stream has ended.
+      return false;
+    }
+
+    if (inputBuffer == null) {
+      inputBuffer = decoder.dequeueInputBuffer();
+      if (inputBuffer == null) {
+        return false;
+      }
+    }
+
+    if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) {
+      // Signal end of stream to drain the decoder before reinitializing it.
+      inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
+      decoder.queueInputBuffer(inputBuffer);
+      inputBuffer = null;
+      decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM;
+      return false;
+    }
+
+    int result;
+    FormatHolder formatHolder = getFormatHolder();
+    if (waitingForKeys) {
+      // We've already read an encrypted sample into buffer, and are waiting for keys.
+      result = C.RESULT_BUFFER_READ;
+    } else {
+      result = readSource(formatHolder, inputBuffer, false);
+    }
+
+    if (result == C.RESULT_NOTHING_READ) {
+      return false;
+    }
+    if (result == C.RESULT_FORMAT_READ) {
+      onInputFormatChanged(formatHolder);
+      return true;
+    }
+    if (inputBuffer.isEndOfStream()) {
+      inputStreamEnded = true;
+      decoder.queueInputBuffer(inputBuffer);
+      inputBuffer = null;
+      return false;
+    }
+    boolean bufferEncrypted = inputBuffer.isEncrypted();
+    waitingForKeys = shouldWaitForKeys(bufferEncrypted);
+    if (waitingForKeys) {
+      return false;
+    }
+    if (waitingForFirstSampleInFormat) {
+      // Associate the pending format with this sample's timestamp for later output-side lookup.
+      formatQueue.add(inputBuffer.timeUs, inputFormat);
+      waitingForFirstSampleInFormat = false;
+    }
+    inputBuffer.flip();
+    inputBuffer.colorInfo = inputFormat.colorInfo;
+    onQueueInputBuffer(inputBuffer);
+    decoder.queueInputBuffer(inputBuffer);
+    buffersInCodecCount++;
+    decoderReceivedBuffers = true;
+    decoderCounters.inputBufferCount++;
+    inputBuffer = null;
+    return true;
+  }
+
+  /**
+   * Attempts to dequeue an output buffer from the decoder and, if successful, passes it to {@link
+   * #processOutputBuffer(long, long)}.
+   *
+   * @param positionUs The player's current position.
+   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+   *     measured at the start of the current iteration of the rendering loop.
+   * @return Whether it may be possible to drain more output data.
+   * @throws ExoPlaybackException If an error occurs draining the output buffer.
+   * @throws VideoDecoderException If the decoder reports an error dequeueing the buffer.
+   */
+  private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
+      throws ExoPlaybackException, VideoDecoderException {
+    if (outputBuffer == null) {
+      outputBuffer = decoder.dequeueOutputBuffer();
+      if (outputBuffer == null) {
+        return false;
+      }
+      // Account for buffers the decoder skipped internally.
+      decoderCounters.skippedOutputBufferCount += outputBuffer.skippedOutputBufferCount;
+      buffersInCodecCount -= outputBuffer.skippedOutputBufferCount;
+    }
+
+    if (outputBuffer.isEndOfStream()) {
+      if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
+        // We're waiting to re-initialize the decoder, and have now processed all final buffers.
+        releaseDecoder();
+        maybeInitDecoder();
+      } else {
+        outputBuffer.release();
+        outputBuffer = null;
+        outputStreamEnded = true;
+      }
+      return false;
+    }
+
+    boolean processedOutputBuffer = processOutputBuffer(positionUs, elapsedRealtimeUs);
+    if (processedOutputBuffer) {
+      onProcessedOutputBuffer(outputBuffer.timeUs);
+      outputBuffer = null;
+    }
+    return processedOutputBuffer;
+  }
+
+  /**
+   * Processes {@link #outputBuffer} by rendering it, skipping it or doing nothing, and returns
+   * whether it may be possible to process another output buffer.
+   *
+   * @param positionUs The player's current position.
+   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+   *     measured at the start of the current iteration of the rendering loop.
+   * @return Whether it may be possible to drain another output buffer.
+   * @throws ExoPlaybackException If an error occurs processing the output buffer.
+   * @throws VideoDecoderException If an error occurs rendering or dropping the output buffer.
+   */
+  private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs)
+      throws ExoPlaybackException, VideoDecoderException {
+    if (initialPositionUs == C.TIME_UNSET) {
+      initialPositionUs = positionUs;
+    }
+
+    // earlyUs > 0 means the buffer is ahead of the playback position; < 0 means it's late.
+    long earlyUs = outputBuffer.timeUs - positionUs;
+    if (!hasOutput()) {
+      // Skip frames in sync with playback, so we'll be at the right frame if the mode changes.
+      if (isBufferLate(earlyUs)) {
+        skipOutputBuffer(outputBuffer);
+        return true;
+      }
+      return false;
+    }
+
+    long presentationTimeUs = outputBuffer.timeUs - outputStreamOffsetUs;
+    // Look up the format that applies at this presentation time, queued in feedInputBuffer.
+    Format format = formatQueue.pollFloor(presentationTimeUs);
+    if (format != null) {
+      outputFormat = format;
+    }
+
+    long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000;
+    boolean isStarted = getState() == STATE_STARTED;
+    if (!renderedFirstFrame
+        || (isStarted
+            && shouldForceRenderOutputBuffer(earlyUs, elapsedRealtimeNowUs - lastRenderTimeUs))) {
+      renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat);
+      return true;
+    }
+
+    if (!isStarted || positionUs == initialPositionUs) {
+      return false;
+    }
+
+    if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs)
+        && maybeDropBuffersToKeyframe(positionUs)) {
+      return false;
+    } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs)) {
+      dropOutputBuffer(outputBuffer);
+      return true;
+    }
+
+    // Render the frame if it's within 30 ms of the playback position.
+    if (earlyUs < 30000) {
+      renderOutputBuffer(outputBuffer, presentationTimeUs, outputFormat);
+      return true;
+    }
+
+    return false;
+  }
+
+  /** Returns whether an output mode is currently set. */
+  private boolean hasOutput() {
+    return outputMode != C.VIDEO_OUTPUT_MODE_NONE;
+  }
+
+  /** Called when the output has been replaced with a different non-null output. */
+  private void onOutputChanged() {
+    // If we know the video size, report it again immediately.
+    maybeRenotifyVideoSizeChanged();
+    // We haven't rendered to the new output yet.
+    clearRenderedFirstFrame();
+    if (getState() == STATE_STARTED) {
+      setJoiningDeadlineMs();
+    }
+  }
+
+  /** Called when the output has been removed, clearing reported size and first-frame state. */
+  private void onOutputRemoved() {
+    clearReportedVideoSize();
+    clearRenderedFirstFrame();
+  }
+
+  /** Called when an unchanged, non-null output is set again. */
+  private void onOutputReset() {
+    // The output is unchanged and non-null. If we know the video size and/or have already
+    // rendered to the output, report these again immediately.
+    maybeRenotifyVideoSizeChanged();
+    maybeRenotifyRenderedFirstFrame();
+  }
+
+  /**
+   * Returns whether the renderer must wait for DRM keys before queueing the pending sample.
+   *
+   * @param bufferEncrypted Whether the pending input buffer is encrypted.
+   * @throws ExoPlaybackException If the DRM session is in an error state.
+   */
+  private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException {
+    if (decoderDrmSession == null
+        || (!bufferEncrypted
+            && (playClearSamplesWithoutKeys || decoderDrmSession.playClearSamplesWithoutKeys()))) {
+      return false;
+    }
+    @DrmSession.State int drmSessionState = decoderDrmSession.getState();
+    if (drmSessionState == DrmSession.STATE_ERROR) {
+      throw createRendererException(decoderDrmSession.getError(), inputFormat);
+    }
+    return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS;
+  }
+
+  /** Starts the joining deadline, or unsets it if no joining time is allowed. */
+  private void setJoiningDeadlineMs() {
+    joiningDeadlineMs =
+        allowedJoiningTimeMs > 0
+            ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs)
+            : C.TIME_UNSET;
+  }
+
+  /** Records that no frame has yet been rendered to the current output. */
+  private void clearRenderedFirstFrame() {
+    renderedFirstFrame = false;
+  }
+
+  /** Notifies listeners of the first rendered frame, at most once per output. */
+  private void maybeNotifyRenderedFirstFrame() {
+    if (!renderedFirstFrame) {
+      renderedFirstFrame = true;
+      eventDispatcher.renderedFirstFrame(surface);
+    }
+  }
+
+  /** Re-notifies listeners of the rendered first frame, if one has already been rendered. */
+  private void maybeRenotifyRenderedFirstFrame() {
+    if (renderedFirstFrame) {
+      eventDispatcher.renderedFirstFrame(surface);
+    }
+  }
+
+  /** Clears the last reported video size so the next size is reported unconditionally. */
+  private void clearReportedVideoSize() {
+    reportedWidth = Format.NO_VALUE;
+    reportedHeight = Format.NO_VALUE;
+  }
+
+  /** Notifies listeners of the video size, if it differs from the last reported size. */
+  private void maybeNotifyVideoSizeChanged(int width, int height) {
+    if (reportedWidth != width || reportedHeight != height) {
+      reportedWidth = width;
+      reportedHeight = height;
+      eventDispatcher.videoSizeChanged(
+          width, height, /* unappliedRotationDegrees= */ 0, /* pixelWidthHeightRatio= */ 1);
+    }
+  }
+
+  /** Re-notifies listeners of the last reported video size, if a size has been reported. */
+  private void maybeRenotifyVideoSizeChanged() {
+    if (reportedWidth != Format.NO_VALUE || reportedHeight != Format.NO_VALUE) {
+      eventDispatcher.videoSizeChanged(
+          reportedWidth,
+          reportedHeight,
+          /* unappliedRotationDegrees= */ 0,
+          /* pixelWidthHeightRatio= */ 1);
+    }
+  }
+
+  /** Reports the accumulated dropped frame count, if non-zero, and resets the accumulator. */
+  private void maybeNotifyDroppedFrames() {
+    if (droppedFrames > 0) {
+      long now = SystemClock.elapsedRealtime();
+      long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
+      eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
+      droppedFrames = 0;
+      droppedFrameAccumulationStartTimeMs = now;
+    }
+  }
+
+  /** Returns whether {@code earlyUs} indicates the buffer is more than 30 ms late. */
+  private static boolean isBufferLate(long earlyUs) {
+    // Class a buffer as late if it should have been presented more than 30 ms ago.
+    return earlyUs < -30000;
+  }
+
+  /** Returns whether {@code earlyUs} indicates the buffer is more than 500 ms late. */
+  private static boolean isBufferVeryLate(long earlyUs) {
+    // Class a buffer as very late if it should have been presented more than 500 ms ago.
+    return earlyUs < -500000;
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderException.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderException.java
new file mode 100644
index 0000000000..dfffbe049b
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderException.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+/** Thrown when a video decoder error occurs. */
+public class VideoDecoderException extends Exception {
+
+  /**
+   * Creates an instance with the given message.
+   *
+   * @param message The detail message for this exception.
+   */
+  public VideoDecoderException(String message) {
+    super(message);
+  }
+
+  /**
+   * Creates an instance with the given message and cause.
+   *
+   * @param message The detail message for this exception.
+   * @param cause The cause (which is saved for later retrieval by the {@link #getCause()} method).
+   *     A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.
+   */
+  public VideoDecoderException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java
new file mode 100644
index 0000000000..69249dd426
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderGLSurfaceView.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.content.Context;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import androidx.annotation.Nullable;
+
+/**
+ * GLSurfaceView for rendering video output. To render video in this view, call {@link
+ * #getVideoDecoderOutputBufferRenderer()} to get a {@link VideoDecoderOutputBufferRenderer} that
+ * will render video decoder output buffers in this view.
+ *
+ * <p>This view is intended for use only with extension renderers. For other use cases a {@link
+ * android.view.SurfaceView} or {@link android.view.TextureView} should be used instead.
+ */
+public class VideoDecoderGLSurfaceView extends GLSurfaceView {
+
+  // Performs the actual YUV-to-RGB rendering on the GL thread.
+  private final VideoDecoderRenderer renderer;
+
+  /** @param context A {@link Context}. */
+  public VideoDecoderGLSurfaceView(Context context) {
+    this(context, /* attrs= */ null);
+  }
+
+  /**
+   * @param context A {@link Context}.
+   * @param attrs Custom attributes.
+   */
+  public VideoDecoderGLSurfaceView(Context context, @Nullable AttributeSet attrs) {
+    super(context, attrs);
+    renderer = new VideoDecoderRenderer(this);
+    // Keep the EGL context across pause/resume so textures and the program survive.
+    setPreserveEGLContextOnPause(true);
+    setEGLContextClientVersion(2);
+    setRenderer(renderer);
+    // Frames are pushed explicitly via requestRender, so only draw when dirty.
+    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+  }
+
+  /** Returns the {@link VideoDecoderOutputBufferRenderer} that will render frames in this view. */
+  public VideoDecoderOutputBufferRenderer getVideoDecoderOutputBufferRenderer() {
+    return renderer;
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java
new file mode 100644
index 0000000000..d911ac3a5a
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderInputBuffer.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+
+/** Input buffer to a video decoder. */
+public class VideoDecoderInputBuffer extends DecoderInputBuffer {
+
+  // Color metadata associated with the sample's format, or null if not set.
+  @Nullable public ColorInfo colorInfo;
+
+  /** Creates an input buffer that uses a direct {@link java.nio.ByteBuffer} for sample data. */
+  public VideoDecoderInputBuffer() {
+    super(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);
+  }
+
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java
new file mode 100644
index 0000000000..b09e8b759a
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.OutputBuffer;
+import java.nio.ByteBuffer;
+
+/** Video decoder output buffer containing video frame data. */
+public class VideoDecoderOutputBuffer extends OutputBuffer {
+
+  /** Buffer owner. */
+  public interface Owner {
+
+    /**
+     * Releases the buffer.
+     *
+     * @param outputBuffer Output buffer.
+     */
+    void releaseOutputBuffer(VideoDecoderOutputBuffer outputBuffer);
+  }
+
+  // LINT.IfChange
+  public static final int COLORSPACE_UNKNOWN = 0;
+  public static final int COLORSPACE_BT601 = 1;
+  public static final int COLORSPACE_BT709 = 2;
+  public static final int COLORSPACE_BT2020 = 3;
+  // LINT.ThenChange(
+  //  ../../../../../../../../../../extensions/av1/src/main/jni/gav1_jni.cc,
+  //  ../../../../../../../../../../extensions/vp9/src/main/jni/vpx_jni.cc
+  // )
+
+  /** Decoder private data. */
+  public int decoderPrivate;
+
+  /** Output mode. */
+  @C.VideoOutputMode public int mode;
+  /** RGB buffer for RGB mode. */
+  @Nullable public ByteBuffer data;
+
+  // Frame width in pixels.
+  public int width;
+  // Frame height in pixels.
+  public int height;
+  // Color metadata associated with the frame, or null if not set.
+  @Nullable public ColorInfo colorInfo;
+
+  /** YUV planes for YUV mode. */
+  @Nullable public ByteBuffer[] yuvPlanes;
+
+  // Row strides (in bytes) for the Y, U and V planes respectively.
+  @Nullable public int[] yuvStrides;
+  // One of the COLORSPACE_* constants above.
+  public int colorspace;
+
+  /**
+   * Supplemental data related to the output frame, if {@link #hasSupplementalData()} returns true.
+   * If present, the buffer is populated with supplemental data from position 0 to its limit.
+   */
+  @Nullable public ByteBuffer supplementalData;
+
+  private final Owner owner;
+
+  /**
+   * Creates VideoDecoderOutputBuffer.
+   *
+   * @param owner Buffer owner.
+   */
+  public VideoDecoderOutputBuffer(Owner owner) {
+    this.owner = owner;
+  }
+
+  @Override
+  public void release() {
+    // Return this buffer to its owner for reuse rather than discarding it.
+    owner.releaseOutputBuffer(this);
+  }
+
+  /**
+   * Initializes the buffer.
+   *
+   * @param timeUs The presentation timestamp for the buffer, in microseconds.
+   * @param mode The output mode. One of {@link C#VIDEO_OUTPUT_MODE_NONE}, {@link
+   *     C#VIDEO_OUTPUT_MODE_YUV} and {@link C#VIDEO_OUTPUT_MODE_SURFACE_YUV}.
+   * @param supplementalData Supplemental data associated with the frame, or {@code null} if not
+   *     present. It is safe to reuse the provided buffer after this method returns.
+   */
+  public void init(
+      long timeUs, @C.VideoOutputMode int mode, @Nullable ByteBuffer supplementalData) {
+    this.timeUs = timeUs;
+    this.mode = mode;
+    if (supplementalData != null && supplementalData.hasRemaining()) {
+      addFlag(C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA);
+      int size = supplementalData.limit();
+      // Grow the local copy only when the incoming data doesn't fit; otherwise reuse it.
+      if (this.supplementalData == null || this.supplementalData.capacity() < size) {
+        this.supplementalData = ByteBuffer.allocate(size);
+      } else {
+        this.supplementalData.clear();
+      }
+      this.supplementalData.put(supplementalData);
+      this.supplementalData.flip();
+      // Restore the caller's buffer so it can be reused, as documented above.
+      supplementalData.position(0);
+    } else {
+      this.supplementalData = null;
+    }
+  }
+
+  /**
+   * Resizes the buffer based on the given stride. Called via JNI after decoding completes.
+   *
+   * @return Whether the buffer was resized successfully.
+   */
+  public boolean initForYuvFrame(int width, int height, int yStride, int uvStride, int colorspace) {
+    this.width = width;
+    this.height = height;
+    this.colorspace = colorspace;
+    // Chroma planes are stored at half height (rounded up).
+    int uvHeight = (int) (((long) height + 1) / 2);
+    if (!isSafeToMultiply(yStride, height) || !isSafeToMultiply(uvStride, uvHeight)) {
+      return false;
+    }
+    int yLength = yStride * height;
+    int uvLength = uvStride * uvHeight;
+    int minimumYuvSize = yLength + (uvLength * 2);
+    // minimumYuvSize < yLength detects int overflow in the addition above.
+    if (!isSafeToMultiply(uvLength, 2) || minimumYuvSize < yLength) {
+      return false;
+    }
+
+    // Initialize data.
+    if (data == null || data.capacity() < minimumYuvSize) {
+      data = ByteBuffer.allocateDirect(minimumYuvSize);
+    } else {
+      data.position(0);
+      data.limit(minimumYuvSize);
+    }
+
+    if (yuvPlanes == null) {
+      yuvPlanes = new ByteBuffer[3];
+    }
+
+    ByteBuffer data = this.data;
+    ByteBuffer[] yuvPlanes = this.yuvPlanes;
+
+    // Rewrapping has to be done on every frame since the stride might have changed.
+    yuvPlanes[0] = data.slice();
+    yuvPlanes[0].limit(yLength);
+    data.position(yLength);
+    yuvPlanes[1] = data.slice();
+    yuvPlanes[1].limit(uvLength);
+    data.position(yLength + uvLength);
+    yuvPlanes[2] = data.slice();
+    yuvPlanes[2].limit(uvLength);
+    if (yuvStrides == null) {
+      yuvStrides = new int[3];
+    }
+    yuvStrides[0] = yStride;
+    yuvStrides[1] = uvStride;
+    yuvStrides[2] = uvStride;
+    return true;
+  }
+
+  /**
+   * Configures the buffer for the given frame dimensions when passing actual frame data via {@link
+   * #decoderPrivate}. Called via JNI after decoding completes.
+   */
+  public void initForPrivateFrame(int width, int height) {
+    this.width = width;
+    this.height = height;
+  }
+
+  /**
+   * Ensures that the result of multiplying individual numbers can fit into the size limit of an
+   * integer.
+   */
+  private static boolean isSafeToMultiply(int a, int b) {
+    return a >= 0 && b >= 0 && !(b > 0 && a >= Integer.MAX_VALUE / b);
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java
new file mode 100644
index 0000000000..f4058ea40f
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderOutputBufferRenderer.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+/** Renders the {@link VideoDecoderOutputBuffer}. */
+public interface VideoDecoderOutputBufferRenderer {
+
+  /**
+   * Sets the output buffer to be rendered. The renderer is responsible for releasing the buffer.
+   *
+   * @param outputBuffer The output buffer to be rendered.
+   */
+  void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer);
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderRenderer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderRenderer.java
new file mode 100644
index 0000000000..1e302e4aaa
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoDecoderRenderer.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.GlUtil;
+import java.nio.FloatBuffer;
+import java.util.concurrent.atomic.AtomicReference;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * GLSurfaceView.Renderer implementation that can render YUV Frames returned by a video decoder
+ * after decoding. It does the YUV to RGB color conversion in the Fragment Shader.
+ */
+/* package */ class VideoDecoderRenderer
+    implements GLSurfaceView.Renderer, VideoDecoderOutputBufferRenderer {
+
+  // YUV -> RGB conversion matrices (column-major 3x3) for the supported color spaces.
+  private static final float[] kColorConversion601 = {
+    1.164f, 1.164f, 1.164f,
+    0.0f, -0.392f, 2.017f,
+    1.596f, -0.813f, 0.0f,
+  };
+
+  private static final float[] kColorConversion709 = {
+    1.164f, 1.164f, 1.164f,
+    0.0f, -0.213f, 2.112f,
+    1.793f, -0.533f, 0.0f,
+  };
+
+  private static final float[] kColorConversion2020 = {
+    1.168f, 1.168f, 1.168f,
+    0.0f, -0.188f, 2.148f,
+    1.683f, -0.652f, 0.0f,
+  };
+
+  private static final String VERTEX_SHADER =
+      "varying vec2 interp_tc_y;\n"
+          + "varying vec2 interp_tc_u;\n"
+          + "varying vec2 interp_tc_v;\n"
+          + "attribute vec4 in_pos;\n"
+          + "attribute vec2 in_tc_y;\n"
+          + "attribute vec2 in_tc_u;\n"
+          + "attribute vec2 in_tc_v;\n"
+          + "void main() {\n"
+          + "  gl_Position = in_pos;\n"
+          + "  interp_tc_y = in_tc_y;\n"
+          + "  interp_tc_u = in_tc_u;\n"
+          + "  interp_tc_v = in_tc_v;\n"
+          + "}\n";
+  private static final String[] TEXTURE_UNIFORMS = {"y_tex", "u_tex", "v_tex"};
+  private static final String FRAGMENT_SHADER =
+      "precision mediump float;\n"
+          + "varying vec2 interp_tc_y;\n"
+          + "varying vec2 interp_tc_u;\n"
+          + "varying vec2 interp_tc_v;\n"
+          + "uniform sampler2D y_tex;\n"
+          + "uniform sampler2D u_tex;\n"
+          + "uniform sampler2D v_tex;\n"
+          + "uniform mat3 mColorConversion;\n"
+          + "void main() {\n"
+          + "  vec3 yuv;\n"
+          + "  yuv.x = texture2D(y_tex, interp_tc_y).r - 0.0625;\n"
+          + "  yuv.y = texture2D(u_tex, interp_tc_u).r - 0.5;\n"
+          + "  yuv.z = texture2D(v_tex, interp_tc_v).r - 0.5;\n"
+          + "  gl_FragColor = vec4(mColorConversion * yuv, 1.0);\n"
+          + "}\n";
+
+  // Full-screen quad vertices, drawn as a triangle strip.
+  private static final FloatBuffer TEXTURE_VERTICES =
+      GlUtil.createBuffer(new float[] {-1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f});
+  private final GLSurfaceView surfaceView;
+  // Texture names for the Y, U and V planes.
+  private final int[] yuvTextures = new int[3];
+  // Latest buffer handed over by the decoder thread; consumed atomically on the GL thread.
+  private final AtomicReference<VideoDecoderOutputBuffer> pendingOutputBufferReference;
+
+  // Kept in field rather than a local variable in order not to get garbage collected before
+  // glDrawArrays uses it.
+  private FloatBuffer[] textureCoords;
+
+  private int program;
+  private int[] texLocations;
+  private int colorMatrixLocation;
+  // Cached per-plane width/stride of the previous frame, to skip redundant texcoord updates.
+  private int[] previousWidths;
+  private int[] previousStrides;
+
+  @Nullable
+  private VideoDecoderOutputBuffer renderedOutputBuffer; // Accessed only from the GL thread.
+
+  public VideoDecoderRenderer(GLSurfaceView surfaceView) {
+    this.surfaceView = surfaceView;
+    pendingOutputBufferReference = new AtomicReference<>();
+    textureCoords = new FloatBuffer[3];
+    texLocations = new int[3];
+    previousWidths = new int[3];
+    previousStrides = new int[3];
+    for (int i = 0; i < 3; i++) {
+      // -1 forces the texture coordinates to be computed on the first frame.
+      previousWidths[i] = previousStrides[i] = -1;
+    }
+  }
+
+  @Override
+  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+    // Compile the shaders and wire up vertex attributes, uniforms and textures.
+    program = GlUtil.compileProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+    GLES20.glUseProgram(program);
+    int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
+    GLES20.glEnableVertexAttribArray(posLocation);
+    GLES20.glVertexAttribPointer(posLocation, 2, GLES20.GL_FLOAT, false, 0, TEXTURE_VERTICES);
+    texLocations[0] = GLES20.glGetAttribLocation(program, "in_tc_y");
+    GLES20.glEnableVertexAttribArray(texLocations[0]);
+    texLocations[1] = GLES20.glGetAttribLocation(program, "in_tc_u");
+    GLES20.glEnableVertexAttribArray(texLocations[1]);
+    texLocations[2] = GLES20.glGetAttribLocation(program, "in_tc_v");
+    GLES20.glEnableVertexAttribArray(texLocations[2]);
+    GlUtil.checkGlError();
+    colorMatrixLocation = GLES20.glGetUniformLocation(program, "mColorConversion");
+    GlUtil.checkGlError();
+    setupTextures();
+    GlUtil.checkGlError();
+  }
+
+  @Override
+  public void onSurfaceChanged(GL10 unused, int width, int height) {
+    GLES20.glViewport(0, 0, width, height);
+  }
+
+  @Override
+  public void onDrawFrame(GL10 unused) {
+    VideoDecoderOutputBuffer pendingOutputBuffer = pendingOutputBufferReference.getAndSet(null);
+    if (pendingOutputBuffer == null && renderedOutputBuffer == null) {
+      // There is no output buffer to render at the moment.
+      return;
+    }
+    if (pendingOutputBuffer != null) {
+      // A new frame arrived; release the previously rendered one and take ownership of the new.
+      if (renderedOutputBuffer != null) {
+        renderedOutputBuffer.release();
+      }
+      renderedOutputBuffer = pendingOutputBuffer;
+    }
+    VideoDecoderOutputBuffer outputBuffer = renderedOutputBuffer;
+    // Set color matrix. Assume BT709 if the color space is unknown.
+    float[] colorConversion = kColorConversion709;
+    switch (outputBuffer.colorspace) {
+      case VideoDecoderOutputBuffer.COLORSPACE_BT601:
+        colorConversion = kColorConversion601;
+        break;
+      case VideoDecoderOutputBuffer.COLORSPACE_BT2020:
+        colorConversion = kColorConversion2020;
+        break;
+      case VideoDecoderOutputBuffer.COLORSPACE_BT709:
+      default:
+        break; // Do nothing
+    }
+    GLES20.glUniformMatrix3fv(colorMatrixLocation, 1, false, colorConversion, 0);
+
+    // Upload each plane as a luminance texture, one row per stride (may be wider than the frame).
+    for (int i = 0; i < 3; i++) {
+      int h = (i == 0) ? outputBuffer.height : (outputBuffer.height + 1) / 2;
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+      GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+      GLES20.glTexImage2D(
+          GLES20.GL_TEXTURE_2D,
+          0,
+          GLES20.GL_LUMINANCE,
+          outputBuffer.yuvStrides[i],
+          h,
+          0,
+          GLES20.GL_LUMINANCE,
+          GLES20.GL_UNSIGNED_BYTE,
+          outputBuffer.yuvPlanes[i]);
+    }
+
+    int[] widths = new int[3];
+    widths[0] = outputBuffer.width;
+    // TODO: Handle streams where chroma channels are not stored at half width and height
+    // compared to luma channel. See [Internal: b/142097774].
+    // U and V planes are being stored at half width compared to Y.
+    widths[1] = widths[2] = (widths[0] + 1) / 2;
+    for (int i = 0; i < 3; i++) {
+      // Set cropping of stride if either width or stride has changed.
+      if (previousWidths[i] != widths[i] || previousStrides[i] != outputBuffer.yuvStrides[i]) {
+        Assertions.checkState(outputBuffer.yuvStrides[i] != 0);
+        float widthRatio = (float) widths[i] / outputBuffer.yuvStrides[i];
+        // These buffers are consumed during each call to glDrawArrays. They need to be member
+        // variables rather than local variables in order not to get garbage collected.
+        textureCoords[i] =
+            GlUtil.createBuffer(
+                new float[] {0.0f, 0.0f, 0.0f, 1.0f, widthRatio, 0.0f, widthRatio, 1.0f});
+        GLES20.glVertexAttribPointer(
+            texLocations[i], 2, GLES20.GL_FLOAT, false, 0, textureCoords[i]);
+        previousWidths[i] = widths[i];
+        previousStrides[i] = outputBuffer.yuvStrides[i];
+      }
+    }
+
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+    GlUtil.checkGlError();
+  }
+
+  @Override
+  public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) {
+    VideoDecoderOutputBuffer oldPendingOutputBuffer =
+        pendingOutputBufferReference.getAndSet(outputBuffer);
+    if (oldPendingOutputBuffer != null) {
+      // The old pending output buffer will never be used for rendering, so release it now.
+      oldPendingOutputBuffer.release();
+    }
+    surfaceView.requestRender();
+  }
+
+  /** Generates the three plane textures and configures linear filtering with clamp-to-edge. */
+  private void setupTextures() {
+    GLES20.glGenTextures(3, yuvTextures, 0);
+    for (int i = 0; i < 3; i++) {
+      GLES20.glUniform1i(GLES20.glGetUniformLocation(program, TEXTURE_UNIFORMS[i]), i);
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+      GLES20.glTexParameterf(
+          GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+      GLES20.glTexParameterf(
+          GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+    }
+    GlUtil.checkGlError();
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java
new file mode 100644
index 0000000000..46e05def5c
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameMetadataListener.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.media.MediaFormat;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+
+/** A listener for metadata corresponding to the video frame being rendered. */
+public interface VideoFrameMetadataListener {
+  /**
+   * Called when the video frame is about to be rendered. This method is called on the playback
+   * thread.
+   *
+   * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
+   * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
+   *     If the platform API version of the device is less than 21, then this is a best effort.
+   * @param format The format associated with the frame.
+   * @param mediaFormat The framework media format associated with the frame, or {@code null} if not
+   *     known or not applicable (e.g., because the frame was not output by a {@link
+   *     android.media.MediaCodec MediaCodec}).
+   */
+  void onVideoFrameAboutToBeRendered(
+      long presentationTimeUs,
+      long releaseTimeNs,
+      Format format,
+      @Nullable MediaFormat mediaFormat);
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java
new file mode 100644
index 0000000000..c13cd4b1cb
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoFrameReleaseTimeHelper.java
@@ -0,0 +1,361 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.hardware.display.DisplayManager;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Message;
+import android.view.Choreographer;
+import android.view.Choreographer.FrameCallback;
+import android.view.Display;
+import android.view.WindowManager;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+
+/**
+ * Makes a best effort to adjust frame release timestamps for a smoother visual result.
+ */
+public final class VideoFrameReleaseTimeHelper {
+
+  // Interval at which the shared Choreographer re-samples the display vsync timestamp.
+  private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500;
+  // Maximum allowed drift between elapsed frame time and elapsed release time (20ms in ns) before
+  // the helper discards its sync point and re-syncs.
+  private static final long MAX_ALLOWED_DRIFT_NS = 20000000;
+
+  // Frames are released this percentage of a vsync period ahead of the targeted vsync.
+  private static final long VSYNC_OFFSET_PERCENTAGE = 80;
+  // Minimum number of frames that must be seen after a sync before adjustments are applied.
+  private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6;
+
+  // Null when vsync alignment is disabled (i.e. no Context was supplied to the constructor).
+  private final WindowManager windowManager;
+  private final VSyncSampler vsyncSampler;
+  private final DefaultDisplayListener displayListener;
+
+  // Vsync period and release offset for the default display, in nanoseconds. C.TIME_UNSET until
+  // the display's refresh rate has been queried.
+  private long vsyncDurationNs;
+  private long vsyncOffsetNs;
+
+  // Smoothing state for the most recently seen frame.
+  private long lastFramePresentationTimeUs;
+  private long adjustedLastFrameTimeNs;
+  private long pendingAdjustedFrameTimeNs;
+
+  // Sync point state: the (frame time, release time) pair captured when syncing, and the number of
+  // distinct frames seen since the sync point.
+  private boolean haveSync;
+  private long syncUnadjustedReleaseTimeNs;
+  private long syncFramePresentationTimeNs;
+  private long frameCount;
+
+  /**
+   * Constructs an instance that smooths frame release timestamps but does not align them with
+   * the default display's vsync signal.
+   */
+  public VideoFrameReleaseTimeHelper() {
+    this(null);
+  }
+
+  /**
+   * Constructs an instance that smooths frame release timestamps and aligns them with the default
+   * display's vsync signal.
+   *
+   * @param context A context from which information about the default display can be retrieved.
+   */
+  public VideoFrameReleaseTimeHelper(@Nullable Context context) {
+    if (context != null) {
+      context = context.getApplicationContext();
+      windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+    } else {
+      windowManager = null;
+    }
+    if (windowManager != null) {
+      // DisplayManager listeners require API 17; on older API levels refresh-rate changes after
+      // construction are not observed.
+      displayListener = Util.SDK_INT >= 17 ? maybeBuildDefaultDisplayListenerV17(context) : null;
+      vsyncSampler = VSyncSampler.getInstance();
+    } else {
+      displayListener = null;
+      vsyncSampler = null;
+    }
+    vsyncDurationNs = C.TIME_UNSET;
+    vsyncOffsetNs = C.TIME_UNSET;
+  }
+
+  /**
+   * Enables the helper. Must be called from the playback thread.
+   */
+  public void enable() {
+    haveSync = false;
+    if (windowManager != null) {
+      vsyncSampler.addObserver();
+      if (displayListener != null) {
+        displayListener.register();
+      }
+      updateDefaultDisplayRefreshRateParams();
+    }
+  }
+
+  /**
+   * Disables the helper. Must be called from the playback thread.
+   */
+  public void disable() {
+    if (windowManager != null) {
+      if (displayListener != null) {
+        displayListener.unregister();
+      }
+      vsyncSampler.removeObserver();
+    }
+  }
+
+  /**
+   * Adjusts a frame release timestamp. Must be called from the playback thread.
+   *
+   * @param framePresentationTimeUs The frame's presentation time, in microseconds.
+   * @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
+   *     the same time base as {@link System#nanoTime()}.
+   * @return The adjusted frame release timestamp, in nanoseconds and in the same time base as
+   *     {@link System#nanoTime()}.
+   */
+  public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs) {
+    long framePresentationTimeNs = framePresentationTimeUs * 1000;
+
+    // Until we know better, the adjustment will be a no-op.
+    long adjustedFrameTimeNs = framePresentationTimeNs;
+    long adjustedReleaseTimeNs = unadjustedReleaseTimeNs;
+
+    if (haveSync) {
+      // See if we've advanced to the next frame.
+      if (framePresentationTimeUs != lastFramePresentationTimeUs) {
+        frameCount++;
+        adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs;
+      }
+      if (frameCount >= MIN_FRAMES_FOR_ADJUSTMENT) {
+        // We're synced and have waited the required number of frames to apply an adjustment.
+        // Calculate the average frame time across all the frames we've seen since the last sync.
+        // This will typically give us a frame rate at a finer granularity than the frame times
+        // themselves (which often only have millisecond granularity).
+        long averageFrameDurationNs = (framePresentationTimeNs - syncFramePresentationTimeNs)
+            / frameCount;
+        // Project the adjusted frame time forward using the average.
+        long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameDurationNs;
+
+        if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
+          haveSync = false;
+        } else {
+          adjustedFrameTimeNs = candidateAdjustedFrameTimeNs;
+          adjustedReleaseTimeNs = syncUnadjustedReleaseTimeNs + adjustedFrameTimeNs
+              - syncFramePresentationTimeNs;
+        }
+      } else {
+        // We're synced but haven't waited the required number of frames to apply an adjustment.
+        // Check drift anyway.
+        if (isDriftTooLarge(framePresentationTimeNs, unadjustedReleaseTimeNs)) {
+          haveSync = false;
+        }
+      }
+    }
+
+    // If we need to sync, do so now.
+    if (!haveSync) {
+      syncFramePresentationTimeNs = framePresentationTimeNs;
+      syncUnadjustedReleaseTimeNs = unadjustedReleaseTimeNs;
+      frameCount = 0;
+      haveSync = true;
+    }
+
+    lastFramePresentationTimeUs = framePresentationTimeUs;
+    pendingAdjustedFrameTimeNs = adjustedFrameTimeNs;
+
+    // If vsync alignment is disabled, or no vsync information is available yet, return the
+    // smoothed (but unsnapped) release time.
+    if (vsyncSampler == null || vsyncDurationNs == C.TIME_UNSET) {
+      return adjustedReleaseTimeNs;
+    }
+    long sampledVsyncTimeNs = vsyncSampler.sampledVsyncTimeNs;
+    if (sampledVsyncTimeNs == C.TIME_UNSET) {
+      return adjustedReleaseTimeNs;
+    }
+
+    // Find the timestamp of the closest vsync. This is the vsync that we're targeting.
+    long snappedTimeNs = closestVsync(adjustedReleaseTimeNs, sampledVsyncTimeNs, vsyncDurationNs);
+    // Apply an offset so that we release before the target vsync, but after the previous one.
+    return snappedTimeNs - vsyncOffsetNs;
+  }
+
+  @TargetApi(17)
+  private DefaultDisplayListener maybeBuildDefaultDisplayListenerV17(Context context) {
+    DisplayManager manager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
+    return manager == null ? null : new DefaultDisplayListener(manager);
+  }
+
+  // Queries the default display's refresh rate and derives the vsync duration and release offset.
+  private void updateDefaultDisplayRefreshRateParams() {
+    // Note: If we fail to update the parameters, we leave them set to their previous values.
+    Display defaultDisplay = windowManager.getDefaultDisplay();
+    if (defaultDisplay != null) {
+      double defaultDisplayRefreshRate = defaultDisplay.getRefreshRate();
+      vsyncDurationNs = (long) (C.NANOS_PER_SECOND / defaultDisplayRefreshRate);
+      vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100;
+    }
+  }
+
+  // Returns whether the elapsed frame time and elapsed release time since the sync point have
+  // drifted apart by more than MAX_ALLOWED_DRIFT_NS.
+  private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) {
+    long elapsedFrameTimeNs = frameTimeNs - syncFramePresentationTimeNs;
+    long elapsedReleaseTimeNs = releaseTimeNs - syncUnadjustedReleaseTimeNs;
+    return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS;
+  }
+
+  // Returns the timestamp of the vsync closest to the given release time, extrapolated in either
+  // direction from the sampled vsync timestamp using the vsync duration.
+  private static long closestVsync(long releaseTime, long sampledVsyncTime, long vsyncDuration) {
+    long vsyncCount = (releaseTime - sampledVsyncTime) / vsyncDuration;
+    long snappedTimeNs = sampledVsyncTime + (vsyncDuration * vsyncCount);
+    long snappedBeforeNs;
+    long snappedAfterNs;
+    if (releaseTime <= snappedTimeNs) {
+      snappedBeforeNs = snappedTimeNs - vsyncDuration;
+      snappedAfterNs = snappedTimeNs;
+    } else {
+      snappedBeforeNs = snappedTimeNs;
+      snappedAfterNs = snappedTimeNs + vsyncDuration;
+    }
+    // Pick whichever neighboring vsync is nearer to the requested release time.
+    long snappedAfterDiff = snappedAfterNs - releaseTime;
+    long snappedBeforeDiff = releaseTime - snappedBeforeNs;
+    return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs;
+  }
+
+  // Keeps the vsync parameters up to date when the default display changes (e.g. refresh rate
+  // switches). Only instantiated on API 17+.
+  @TargetApi(17)
+  private final class DefaultDisplayListener implements DisplayManager.DisplayListener {
+
+    private final DisplayManager displayManager;
+
+    public DefaultDisplayListener(DisplayManager displayManager) {
+      this.displayManager = displayManager;
+    }
+
+    public void register() {
+      displayManager.registerDisplayListener(this, null);
+    }
+
+    public void unregister() {
+      displayManager.unregisterDisplayListener(this);
+    }
+
+    @Override
+    public void onDisplayAdded(int displayId) {
+      // Do nothing.
+    }
+
+    @Override
+    public void onDisplayRemoved(int displayId) {
+      // Do nothing.
+    }
+
+    @Override
+    public void onDisplayChanged(int displayId) {
+      if (displayId == Display.DEFAULT_DISPLAY) {
+        updateDefaultDisplayRefreshRateParams();
+      }
+    }
+
+  }
+
+  /**
+   * Samples display vsync timestamps. A single instance using a single {@link Choreographer} is
+   * shared by all {@link VideoFrameReleaseTimeHelper} instances. This is done to avoid a resource
+   * leak in the platform on API levels prior to 23. See [Internal: b/12455729].
+   */
+  private static final class VSyncSampler implements FrameCallback, Handler.Callback {
+
+    // Written on the choreographer owner thread, read from playback threads (hence volatile).
+    public volatile long sampledVsyncTimeNs;
+
+    private static final int CREATE_CHOREOGRAPHER = 0;
+    private static final int MSG_ADD_OBSERVER = 1;
+    private static final int MSG_REMOVE_OBSERVER = 2;
+
+    private static final VSyncSampler INSTANCE = new VSyncSampler();
+
+    private final Handler handler;
+    private final HandlerThread choreographerOwnerThread;
+    private Choreographer choreographer;
+    // Number of helpers currently observing; sampling runs only while this is positive.
+    private int observerCount;
+
+    public static VSyncSampler getInstance() {
+      return INSTANCE;
+    }
+
+    private VSyncSampler() {
+      sampledVsyncTimeNs = C.TIME_UNSET;
+      // The Choreographer must be created on a thread with a Looper, so a dedicated owner thread
+      // is used and all state changes are marshalled onto it via the handler.
+      choreographerOwnerThread = new HandlerThread("ChoreographerOwner:Handler");
+      choreographerOwnerThread.start();
+      handler = Util.createHandler(choreographerOwnerThread.getLooper(), /* callback= */ this);
+      handler.sendEmptyMessage(CREATE_CHOREOGRAPHER);
+    }
+
+    /**
+     * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is observing
+     * {@link #sampledVsyncTimeNs}, and hence that the value should be periodically updated.
+     */
+    public void addObserver() {
+      handler.sendEmptyMessage(MSG_ADD_OBSERVER);
+    }
+
+    /**
+     * Notifies the sampler that a {@link VideoFrameReleaseTimeHelper} is no longer observing
+     * {@link #sampledVsyncTimeNs}.
+     */
+    public void removeObserver() {
+      handler.sendEmptyMessage(MSG_REMOVE_OBSERVER);
+    }
+
+    @Override
+    public void doFrame(long vsyncTimeNs) {
+      // Record the latest vsync timestamp and schedule the next sample after a delay.
+      sampledVsyncTimeNs = vsyncTimeNs;
+      choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS);
+    }
+
+    @Override
+    public boolean handleMessage(Message message) {
+      switch (message.what) {
+        case CREATE_CHOREOGRAPHER: {
+          createChoreographerInstanceInternal();
+          return true;
+        }
+        case MSG_ADD_OBSERVER: {
+          addObserverInternal();
+          return true;
+        }
+        case MSG_REMOVE_OBSERVER: {
+          removeObserverInternal();
+          return true;
+        }
+        default: {
+          return false;
+        }
+      }
+    }
+
+    private void createChoreographerInstanceInternal() {
+      choreographer = Choreographer.getInstance();
+    }
+
+    private void addObserverInternal() {
+      observerCount++;
+      if (observerCount == 1) {
+        // First observer: start sampling.
+        choreographer.postFrameCallback(this);
+      }
+    }
+
+    private void removeObserverInternal() {
+      observerCount--;
+      if (observerCount == 0) {
+        // Last observer removed: stop sampling and invalidate the sampled value.
+        choreographer.removeFrameCallback(this);
+        sampledVsyncTimeNs = C.TIME_UNSET;
+      }
+    }
+
+  }
+
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoListener.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoListener.java
new file mode 100644
index 0000000000..a469366b78
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoListener.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+/** A listener for metadata corresponding to video being rendered. */
+public interface VideoListener {
+
+  /**
+   * Called each time there's a change in the size of the video being rendered.
+   *
+   * @param width The video width in pixels.
+   * @param height The video height in pixels.
+   * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
+   *     rotation in degrees that the application should apply for the video for it to be rendered
+   *     in the correct orientation. This value will always be zero on API levels 21 and above,
+   *     since the renderer will apply all necessary rotations internally. On earlier API levels
+   *     this is not possible. Applications that use {@link android.view.TextureView} can apply the
+   *     rotation by calling {@link android.view.TextureView#setTransform}. Applications that do not
+   *     expect to encounter rotated videos can safely ignore this parameter.
+   * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case of
+   *     square pixels this will be equal to 1.0. Different values are indicative of anamorphic
+   *     content.
+   */
+  default void onVideoSizeChanged(
+      int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {}
+
+  /**
+   * Called each time there's a change in the size of the surface onto which the video is being
+   * rendered.
+   *
+   * @param width The surface width in pixels. May be {@link
+   *     org.mozilla.thirdparty.com.google.android.exoplayer2.C#LENGTH_UNSET} if unknown, or 0 if
+   *     the video is not rendered onto a surface.
+   * @param height The surface height in pixels. May be {@link
+   *     org.mozilla.thirdparty.com.google.android.exoplayer2.C#LENGTH_UNSET} if unknown, or 0 if
+   *     the video is not rendered onto a surface.
+   */
+  default void onSurfaceSizeChanged(int width, int height) {}
+
+  /**
+   * Called when a frame is rendered for the first time since setting the surface, and when a frame
+   * is rendered for the first time since a video track was selected.
+   */
+  default void onRenderedFirstFrame() {}
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoRendererEventListener.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoRendererEventListener.java
new file mode 100644
index 0000000000..6509a353b2
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/VideoRendererEventListener.java
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import static org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util.castNonNull;
+
+import android.os.Handler;
+import android.os.SystemClock;
+import android.view.Surface;
+import android.view.TextureView;
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Renderer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderCounters;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+
+/**
+ * Listener of video {@link Renderer} events. All methods have no-op default implementations to
+ * allow selective overrides.
+ */
+public interface VideoRendererEventListener {
+
+  /**
+   * Called when the renderer is enabled.
+   *
+   * @param counters {@link DecoderCounters} that will be updated by the renderer for as long as it
+   *     remains enabled.
+   */
+  default void onVideoEnabled(DecoderCounters counters) {}
+
+  /**
+   * Called when a decoder is created.
+   *
+   * @param decoderName The decoder that was created.
+   * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization
+   *     finished.
+   * @param initializationDurationMs The time taken to initialize the decoder in milliseconds.
+   */
+  default void onVideoDecoderInitialized(
+      String decoderName, long initializedTimestampMs, long initializationDurationMs) {}
+
+  /**
+   * Called when the format of the media being consumed by the renderer changes.
+   *
+   * @param format The new format.
+   */
+  default void onVideoInputFormatChanged(Format format) {}
+
+  /**
+   * Called to report the number of frames dropped by the renderer. Dropped frames are reported
+   * whenever the renderer is stopped having dropped frames, and optionally, whenever the count
+   * reaches a specified threshold whilst the renderer is started.
+   *
+   * @param count The number of dropped frames.
+   * @param elapsedMs The duration in milliseconds over which the frames were dropped. This duration
+   *     is timed from when the renderer was started or from when dropped frames were last reported
+   *     (whichever was more recent), and not from when the first of the reported drops occurred.
+   */
+  default void onDroppedFrames(int count, long elapsedMs) {}
+
+  /**
+   * Called before a frame is rendered for the first time since setting the surface, and each time
+   * there's a change in the size, rotation or pixel aspect ratio of the video being rendered.
+   *
+   * @param width The video width in pixels.
+   * @param height The video height in pixels.
+   * @param unappliedRotationDegrees For videos that require a rotation, this is the clockwise
+   *     rotation in degrees that the application should apply for the video for it to be rendered
+   *     in the correct orientation. This value will always be zero on API levels 21 and above,
+   *     since the renderer will apply all necessary rotations internally. On earlier API levels
+   *     this is not possible. Applications that use {@link TextureView} can apply the rotation by
+   *     calling {@link TextureView#setTransform}. Applications that do not expect to encounter
+   *     rotated videos can safely ignore this parameter.
+   * @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case of
+   *     square pixels this will be equal to 1.0. Different values are indicative of anamorphic
+   *     content.
+   */
+  default void onVideoSizeChanged(
+      int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {}
+
+  /**
+   * Called when a frame is rendered for the first time since setting the surface, and when a frame
+   * is rendered for the first time since the renderer was reset.
+   *
+   * @param surface The {@link Surface} to which a first frame has been rendered, or {@code null} if
+   *     the renderer renders to something that isn't a {@link Surface}.
+   */
+  default void onRenderedFirstFrame(@Nullable Surface surface) {}
+
+  /**
+   * Called when the renderer is disabled.
+   *
+   * @param counters {@link DecoderCounters} that were updated by the renderer.
+   */
+  default void onVideoDisabled(DecoderCounters counters) {}
+
+  /**
+   * Dispatches events to a {@link VideoRendererEventListener}. Each dispatch method posts the
+   * corresponding listener call to the handler, so listener methods run on the handler's thread.
+   */
+  final class EventDispatcher {
+
+    // Both fields are null for a dummy dispatcher; otherwise both are non-null.
+    @Nullable private final Handler handler;
+    @Nullable private final VideoRendererEventListener listener;
+
+    /**
+     * @param handler A handler for dispatching events, or null if creating a dummy instance.
+     * @param listener The listener to which events should be dispatched, or null if creating a
+     *     dummy instance.
+     */
+    public EventDispatcher(@Nullable Handler handler,
+        @Nullable VideoRendererEventListener listener) {
+      // A non-null listener requires a non-null handler to dispatch on.
+      this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
+      this.listener = listener;
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onVideoEnabled(DecoderCounters)}. */
+    public void enabled(DecoderCounters decoderCounters) {
+      if (handler != null) {
+        handler.post(() -> castNonNull(listener).onVideoEnabled(decoderCounters));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onVideoDecoderInitialized(String, long, long)}. */
+    public void decoderInitialized(
+        String decoderName, long initializedTimestampMs, long initializationDurationMs) {
+      if (handler != null) {
+        handler.post(
+            () ->
+                castNonNull(listener)
+                    .onVideoDecoderInitialized(
+                        decoderName, initializedTimestampMs, initializationDurationMs));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onVideoInputFormatChanged(Format)}. */
+    public void inputFormatChanged(Format format) {
+      if (handler != null) {
+        handler.post(() -> castNonNull(listener).onVideoInputFormatChanged(format));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onDroppedFrames(int, long)}. */
+    public void droppedFrames(int droppedFrameCount, long elapsedMs) {
+      if (handler != null) {
+        handler.post(() -> castNonNull(listener).onDroppedFrames(droppedFrameCount, elapsedMs));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onVideoSizeChanged(int, int, int, float)}. */
+    public void videoSizeChanged(
+        int width,
+        int height,
+        final int unappliedRotationDegrees,
+        final float pixelWidthHeightRatio) {
+      if (handler != null) {
+        handler.post(
+            () ->
+                castNonNull(listener)
+                    .onVideoSizeChanged(
+                        width, height, unappliedRotationDegrees, pixelWidthHeightRatio));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onRenderedFirstFrame(Surface)}. */
+    public void renderedFirstFrame(@Nullable Surface surface) {
+      if (handler != null) {
+        handler.post(() -> castNonNull(listener).onRenderedFirstFrame(surface));
+      }
+    }
+
+    /** Invokes {@link VideoRendererEventListener#onVideoDisabled(DecoderCounters)}. */
+    public void disabled(DecoderCounters counters) {
+      // NOTE(review): ensureUpdated() is invoked both here (on the calling thread) and again on
+      // the handler thread before the listener sees the counters — presumably to flush pending
+      // counter updates across threads; confirm against DecoderCounters' documentation.
+      counters.ensureUpdated();
+      if (handler != null) {
+        handler.post(
+            () -> {
+              counters.ensureUpdated();
+              castNonNull(listener).onVideoDisabled(counters);
+            });
+      }
+    }
+
+  }
+
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/package-info.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/package-info.java
new file mode 100644
index 0000000000..7053c14d16
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Declares that all type usages in this package are non-null by default unless explicitly
+// annotated otherwise (e.g. with @Nullable).
+@NonNullApi
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video;
+
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionListener.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionListener.java
new file mode 100644
index 0000000000..87bd94c5bc
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionListener.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+/** Listens to camera motion. */
+public interface CameraMotionListener {
+
+  /**
+   * Called when a new camera motion is read. This method is called on the playback thread.
+   *
+   * @param timeUs The presentation time of the data.
+   * @param rotation Angle axis orientation in radians representing the rotation from camera
+   *     coordinate system to world coordinate system.
+   */
+  void onCameraMotion(long timeUs, float[] rotation);
+
+  /** Called when the camera motion track position is reset or the track is disabled. */
+  void onCameraMotionReset();
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java
new file mode 100644
index 0000000000..378363aca0
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/CameraMotionRenderer.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.BaseRenderer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.ExoPlaybackException;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Format;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.FormatHolder;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.Renderer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.RendererCapabilities;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.decoder.DecoderInputBuffer;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.MimeTypes;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+import java.nio.ByteBuffer;
+
+/** A {@link Renderer} that parses the camera motion track. */
+public class CameraMotionRenderer extends BaseRenderer {
+
+  // The amount of time to read samples ahead of the current time.
+  private static final int SAMPLE_WINDOW_DURATION_US = 100000;
+
+  private final DecoderInputBuffer buffer;
+  private final ParsableByteArray scratch;
+
+  // Stream offset, subtracted from sample timestamps before reporting them to the listener.
+  private long offsetUs;
+  @Nullable private CameraMotionListener listener;
+  // Timestamp of the most recently read sample, in the renderer's time base.
+  private long lastTimestampUs;
+
+  public CameraMotionRenderer() {
+    super(C.TRACK_TYPE_CAMERA_MOTION);
+    buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
+    scratch = new ParsableByteArray();
+  }
+
+  // Only the camera motion application MIME type is handled by this renderer.
+  @Override
+  @Capabilities
+  public int supportsFormat(Format format) {
+    return MimeTypes.APPLICATION_CAMERA_MOTION.equals(format.sampleMimeType)
+        ? RendererCapabilities.create(FORMAT_HANDLED)
+        : RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
+  }
+
+  @Override
+  public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException {
+    if (messageType == C.MSG_SET_CAMERA_MOTION_LISTENER) {
+      listener = (CameraMotionListener) message;
+    } else {
+      super.handleMessage(messageType, message);
+    }
+  }
+
+  @Override
+  protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException {
+    this.offsetUs = offsetUs;
+  }
+
+  @Override
+  protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
+    resetListener();
+  }
+
+  @Override
+  protected void onDisabled() {
+    resetListener();
+  }
+
+  @Override
+  public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
+    // Keep reading available samples as long as the sample time is not too far into the future.
+    while (!hasReadStreamToEnd() && lastTimestampUs < positionUs + SAMPLE_WINDOW_DURATION_US) {
+      buffer.clear();
+      FormatHolder formatHolder = getFormatHolder();
+      int result = readSource(formatHolder, buffer, /* formatRequired= */ false);
+      if (result != C.RESULT_BUFFER_READ || buffer.isEndOfStream()) {
+        return;
+      }
+
+      buffer.flip();
+      lastTimestampUs = buffer.timeUs;
+      if (listener != null) {
+        float[] rotation = parseMetadata(Util.castNonNull(buffer.data));
+        if (rotation != null) {
+          // Remove the stream offset so the listener sees the sample's presentation time.
+          Util.castNonNull(listener).onCameraMotion(lastTimestampUs - offsetUs, rotation);
+        }
+      }
+    }
+  }
+
+  @Override
+  public boolean isEnded() {
+    return hasReadStreamToEnd();
+  }
+
+  // Always ready: this renderer never blocks playback waiting for camera motion data.
+  @Override
+  public boolean isReady() {
+    return true;
+  }
+
+  // Parses a 16-byte camera motion sample: 4 reserved bytes followed by three little-endian
+  // floats forming an angle-axis rotation. Returns null if the sample has an unexpected size.
+  private @Nullable float[] parseMetadata(ByteBuffer data) {
+    if (data.remaining() != 16) {
+      return null;
+    }
+    scratch.reset(data.array(), data.limit());
+    scratch.setPosition(data.arrayOffset() + 4); // skip reserved bytes too.
+    float[] result = new float[3];
+    for (int i = 0; i < 3; i++) {
+      result[i] = Float.intBitsToFloat(scratch.readLittleEndianInt());
+    }
+    return result;
+  }
+
+  // Clears the last sample timestamp and notifies the listener that the track position was reset.
+  private void resetListener() {
+    lastTimestampUs = 0;
+    if (listener != null) {
+      listener.onCameraMotionReset();
+    }
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java
new file mode 100644
index 0000000000..450058fb6a
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/FrameRotationQueue.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+import android.opengl.Matrix;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.TimedValueQueue;
+
+/**
+ * This class serves multiple purposes:
+ *
+ * <ul>
+ *   <li>Queues the rotation metadata extracted from the camera motion track.
+ *   <li>Converts the metadata to rotation matrices in the OpenGL coordinate system.
+ *   <li>Recenters the rotations to compensate for the yaw of the initial rotation.
+ * </ul>
+ */
+public final class FrameRotationQueue {
+  // Scratch matrices reused across pollRotationMatrix calls to avoid per-frame allocation.
+  private final float[] recenterMatrix;
+  private final float[] rotationMatrix;
+  // Angle-axis rotations queued by presentation timestamp, in microseconds.
+  private final TimedValueQueue<float[]> rotations;
+  // Whether recenterMatrix has been derived from the first polled rotation since the last reset.
+  private boolean recenterMatrixComputed;
+
+  public FrameRotationQueue() {
+    recenterMatrix = new float[16];
+    rotationMatrix = new float[16];
+    rotations = new TimedValueQueue<>();
+  }
+
+  /**
+   * Sets a rotation for a given timestamp.
+   *
+   * @param timestampUs Timestamp of the rotation.
+   * @param angleAxis Angle axis orientation in radians representing the rotation from camera
+   *     coordinate system to world coordinate system.
+   */
+  public void setRotation(long timestampUs, float[] angleAxis) {
+    rotations.add(timestampUs, angleAxis);
+  }
+
+  /** Removes all of the rotations and forces rotations to be recentered. */
+  public void reset() {
+    rotations.clear();
+    recenterMatrixComputed = false;
+  }
+
+  /**
+   * Copies the rotation matrix with the greatest timestamp which is less than or equal to the given
+   * timestamp to {@code matrix}. Removes all older rotations and the returned one from the queue.
+   * Does nothing if there is no such rotation.
+   *
+   * @param matrix The rotation matrix.
+   * @param timestampUs The time in microseconds to query the rotation.
+   * @return Whether a rotation matrix is copied to {@code matrix}.
+   */
+  public boolean pollRotationMatrix(float[] matrix, long timestampUs) {
+    float[] rotation = rotations.pollFloor(timestampUs);
+    if (rotation == null) {
+      return false;
+    }
+    // TODO [Internal: b/113315546]: Slerp between the floor and ceil rotation.
+    getRotationMatrixFromAngleAxis(rotationMatrix, rotation);
+    if (!recenterMatrixComputed) {
+      // Lazily derive the recentering matrix from the first rotation seen after a reset.
+      computeRecenterMatrix(recenterMatrix, rotationMatrix);
+      recenterMatrixComputed = true;
+    }
+    Matrix.multiplyMM(matrix, 0, recenterMatrix, 0, rotationMatrix, 0);
+    return true;
+  }
+
+  /**
+   * Computes a recentering matrix from the given angle-axis rotation only accounting for yaw. Roll
+   * and tilt will not be compensated.
+   *
+   * @param recenterMatrix The recenter matrix.
+   * @param rotationMatrix The rotation matrix.
+   */
+  public static void computeRecenterMatrix(float[] recenterMatrix, float[] rotationMatrix) {
+    // The re-centering matrix is computed as follows:
+    // recenter.row(2) = temp.col(2).transpose();
+    // recenter.row(0) = recenter.row(1).cross(recenter.row(2)).normalized();
+    // recenter.row(2) = recenter.row(0).cross(recenter.row(1)).normalized();
+    //            | temp[10]  0  -temp[8]   0 |
+    //            |    0      1      0      0 |
+    // recenter = | temp[8]   0   temp[10]  0 |
+    //            |    0      0      0      1 |
+    Matrix.setIdentityM(recenterMatrix, 0);
+    float normRowSqr =
+        rotationMatrix[10] * rotationMatrix[10] + rotationMatrix[8] * rotationMatrix[8];
+    float normRow = (float) Math.sqrt(normRowSqr);
+    // NOTE(review): if rotationMatrix[8] and rotationMatrix[10] are both 0 (camera looking
+    // straight up or down), normRow is 0 and the divisions below yield NaN — confirm callers
+    // never hit this case.
+    recenterMatrix[0] = rotationMatrix[10] / normRow;
+    recenterMatrix[2] = rotationMatrix[8] / normRow;
+    recenterMatrix[8] = -rotationMatrix[8] / normRow;
+    recenterMatrix[10] = rotationMatrix[10] / normRow;
+  }
+
+  /**
+   * Converts an angle-axis rotation (CAMM axis convention) into a 4x4 OpenGL rotation matrix,
+   * negating the y and z components to translate between the two coordinate systems.
+   */
+  private static void getRotationMatrixFromAngleAxis(float[] matrix, float[] angleAxis) {
+    // Convert coordinates to OpenGL coordinates.
+    // CAMM motion metadata: +x right, +y down, and +z forward.
+    // OpenGL: +x right, +y up, -z forwards
+    float x = angleAxis[0];
+    float y = -angleAxis[1];
+    float z = -angleAxis[2];
+    float angleRad = Matrix.length(x, y, z);
+    if (angleRad != 0) {
+      float angleDeg = (float) Math.toDegrees(angleRad);
+      Matrix.setRotateM(matrix, 0, angleDeg, x / angleRad, y / angleRad, z / angleRad);
+    } else {
+      // A zero-length axis encodes no rotation.
+      Matrix.setIdentityM(matrix, 0);
+    }
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/Projection.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/Projection.java
new file mode 100644
index 0000000000..e3d614cab3
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/Projection.java
@@ -0,0 +1,236 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+import androidx.annotation.IntDef;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C.StereoMode;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Assertions;
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/** The projection mesh used with 360/VR videos. */
+public final class Projection {
+
+  /** Enforces allowed (sub) mesh draw modes. */
+  @Documented
+  @Retention(RetentionPolicy.SOURCE)
+  @IntDef({DRAW_MODE_TRIANGLES, DRAW_MODE_TRIANGLES_STRIP, DRAW_MODE_TRIANGLES_FAN})
+  public @interface DrawMode {}
+  /** Triangle draw mode. */
+  public static final int DRAW_MODE_TRIANGLES = 0;
+  /** Triangle strip draw mode. */
+  public static final int DRAW_MODE_TRIANGLES_STRIP = 1;
+  /** Triangle fan draw mode. */
+  public static final int DRAW_MODE_TRIANGLES_FAN = 2;
+
+  /** Number of texture coordinates per vertex. */
+  public static final int TEXTURE_COORDS_PER_VERTEX = 2;
+  /** Number of position coordinates per vertex. */
+  public static final int POSITION_COORDS_PER_VERTEX = 3;
+
+  /**
+   * Generates a complete sphere equirectangular projection.
+   *
+   * @param stereoMode A {@link C.StereoMode} value.
+   */
+  public static Projection createEquirectangular(@C.StereoMode int stereoMode) {
+    return createEquirectangular(
+        /* radius= */ 50, // Should be large enough that there are no stereo artifacts.
+        /* latitudes= */ 36, // Should be large enough to prevent videos looking wavy.
+        /* longitudes= */ 72, // Should be large enough to prevent videos looking wavy.
+        /* verticalFovDegrees= */ 180,
+        /* horizontalFovDegrees= */ 360,
+        stereoMode);
+  }
+
+  /**
+   * Generates an equirectangular projection.
+   *
+   * @param radius Size of the sphere. Must be &gt; 0.
+   * @param latitudes Number of rows that make up the sphere. Must be &gt;= 1.
+   * @param longitudes Number of columns that make up the sphere. Must be &gt;= 1.
+   * @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in
+   *     (0, 180].
+   * @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere. Must be
+   *     in (0, 360].
+   * @param stereoMode A {@link C.StereoMode} value.
+   * @return an equirectangular projection.
+   */
+  public static Projection createEquirectangular(
+      float radius,
+      int latitudes,
+      int longitudes,
+      float verticalFovDegrees,
+      float horizontalFovDegrees,
+      @C.StereoMode int stereoMode) {
+    Assertions.checkArgument(radius > 0);
+    Assertions.checkArgument(latitudes >= 1);
+    Assertions.checkArgument(longitudes >= 1);
+    Assertions.checkArgument(verticalFovDegrees > 0 && verticalFovDegrees <= 180);
+    Assertions.checkArgument(horizontalFovDegrees > 0 && horizontalFovDegrees <= 360);
+
+    // Compute angular size in radians of each UV quad.
+    float verticalFovRads = (float) Math.toRadians(verticalFovDegrees);
+    float horizontalFovRads = (float) Math.toRadians(horizontalFovDegrees);
+    float quadHeightRads = verticalFovRads / latitudes;
+    float quadWidthRads = horizontalFovRads / longitudes;
+
+    // Each latitude strip has 2 * (longitudes quads + extra edge) vertices + 2 degenerate vertices.
+    int vertexCount = (2 * (longitudes + 1) + 2) * latitudes;
+    // Buffer to return.
+    float[] vertexData = new float[vertexCount * POSITION_COORDS_PER_VERTEX];
+    float[] textureData = new float[vertexCount * TEXTURE_COORDS_PER_VERTEX];
+
+    // Generate the data for the sphere which is a set of triangle strips representing each
+    // latitude band.
+    int vOffset = 0; // Offset into the vertexData array.
+    int tOffset = 0; // Offset into the textureData array.
+    // (i, j) represents a quad in the equirectangular sphere.
+    for (int j = 0; j < latitudes; ++j) { // For each horizontal triangle strip.
+      // Each latitude band lies between the two phi values. Each vertical edge on a band lies on
+      // a theta value.
+      float phiLow = quadHeightRads * j - verticalFovRads / 2;
+      float phiHigh = quadHeightRads * (j + 1) - verticalFovRads / 2;
+
+      for (int i = 0; i < longitudes + 1; ++i) { // For each vertical edge in the band.
+        for (int k = 0; k < 2; ++k) { // For low and high points on an edge.
+          // For each point, determine its position in polar coordinates.
+          float phi = k == 0 ? phiLow : phiHigh;
+          float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2;
+
+          // Set vertex position data as Cartesian coordinates.
+          vertexData[vOffset++] = -(float) (radius * Math.sin(theta) * Math.cos(phi));
+          vertexData[vOffset++] = (float) (radius * Math.sin(phi));
+          vertexData[vOffset++] = (float) (radius * Math.cos(theta) * Math.cos(phi));
+
+          // Texture coordinates are normalized fractions of the covered field of view.
+          textureData[tOffset++] = i * quadWidthRads / horizontalFovRads;
+          textureData[tOffset++] = (j + k) * quadHeightRads / verticalFovRads;
+
+          // Break up the triangle strip with degenerate vertices by copying first and last points.
+          if ((i == 0 && k == 0) || (i == longitudes && k == 1)) {
+            System.arraycopy(
+                vertexData,
+                vOffset - POSITION_COORDS_PER_VERTEX,
+                vertexData,
+                vOffset,
+                POSITION_COORDS_PER_VERTEX);
+            vOffset += POSITION_COORDS_PER_VERTEX;
+            System.arraycopy(
+                textureData,
+                tOffset - TEXTURE_COORDS_PER_VERTEX,
+                textureData,
+                tOffset,
+                TEXTURE_COORDS_PER_VERTEX);
+            tOffset += TEXTURE_COORDS_PER_VERTEX;
+          }
+        }
+        // Move on to the next vertical edge in the triangle strip.
+      }
+      // Move on to the next triangle strip.
+    }
+    SubMesh subMesh =
+        new SubMesh(SubMesh.VIDEO_TEXTURE_ID, vertexData, textureData, DRAW_MODE_TRIANGLES_STRIP);
+    return new Projection(new Mesh(subMesh), stereoMode);
+  }
+
+  /** The Mesh corresponding to the left eye. */
+  public final Mesh leftMesh;
+  /**
+   * The Mesh corresponding to the right eye. If {@code singleMesh} is true then this mesh is
+   * identical to {@link #leftMesh}.
+   */
+  public final Mesh rightMesh;
+  /** The stereo mode. */
+  public final @StereoMode int stereoMode;
+  /** Whether the left and right mesh are identical. */
+  public final boolean singleMesh;
+
+  /**
+   * Creates a Projection with single mesh.
+   *
+   * @param mesh the Mesh for both eyes.
+   * @param stereoMode A {@link StereoMode} value.
+   */
+  public Projection(Mesh mesh, int stereoMode) {
+    this(mesh, mesh, stereoMode);
+  }
+
+  /**
+   * Creates a Projection with dual mesh. Use {@link #Projection(Mesh, int)} if there is single mesh
+   * for both eyes.
+   *
+   * @param leftMesh the Mesh corresponding to the left eye.
+   * @param rightMesh the Mesh corresponding to the right eye.
+   * @param stereoMode A {@link C.StereoMode} value.
+   */
+  public Projection(Mesh leftMesh, Mesh rightMesh, int stereoMode) {
+    this.leftMesh = leftMesh;
+    this.rightMesh = rightMesh;
+    this.stereoMode = stereoMode;
+    // Reference equality is intentional: the single-mesh constructor passes the same instance.
+    this.singleMesh = leftMesh == rightMesh;
+  }
+
+  /** The sub mesh associated with the {@link Mesh}. */
+  public static final class SubMesh {
+    /** Texture ID for video frames. */
+    public static final int VIDEO_TEXTURE_ID = 0;
+
+    /** Texture ID. */
+    public final int textureId;
+    /** The drawing mode. One of {@link DrawMode}. */
+    public final @DrawMode int mode;
+    /** The SubMesh vertices. */
+    public final float[] vertices;
+    /** The SubMesh texture coordinates. */
+    public final float[] textureCoords;
+
+    public SubMesh(int textureId, float[] vertices, float[] textureCoords, @DrawMode int mode) {
+      this.textureId = textureId;
+      // Both arrays must describe the same number of vertices (3 position floats vs 2 UV floats).
+      Assertions.checkArgument(
+          vertices.length * (long) TEXTURE_COORDS_PER_VERTEX
+              == textureCoords.length * (long) POSITION_COORDS_PER_VERTEX);
+      this.vertices = vertices;
+      this.textureCoords = textureCoords;
+      this.mode = mode;
+    }
+
+    /** Returns the SubMesh vertex count. */
+    public int getVertexCount() {
+      return vertices.length / POSITION_COORDS_PER_VERTEX;
+    }
+  }
+
+  /** A Mesh associated with the projection scene. */
+  public static final class Mesh {
+    private final SubMesh[] subMeshes;
+
+    public Mesh(SubMesh... subMeshes) {
+      this.subMeshes = subMeshes;
+    }
+
+    /** Returns the number of sub meshes. */
+    public int getSubMeshCount() {
+      return subMeshes.length;
+    }
+
+    /** Returns the SubMesh for the given index. */
+    public SubMesh getSubMesh(int index) {
+      return subMeshes[index];
+    }
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java
new file mode 100644
index 0000000000..cff4b2845d
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/ProjectionDecoder.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+import androidx.annotation.Nullable;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.C;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableBitArray;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.ParsableByteArray;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.Util;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical.Projection.Mesh;
+import org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical.Projection.SubMesh;
+import java.util.ArrayList;
+import java.util.zip.Inflater;
+
+/**
+ * A decoder for the projection mesh.
+ *
+ * <p>The mesh boxes parsed are described at <a
+ * href="https://github.com/google/spatial-media/blob/master/docs/spherical-video-v2-rfc.md">
+ * Spherical Video V2 RFC</a>.
+ *
+ * <p>The decoder does not perform CRC checks at the moment.
+ */
+public final class ProjectionDecoder {
+
+  // Four-character-code atom/encoding identifiers, as big-endian ints.
+  private static final int TYPE_YTMP = 0x79746d70; // "ytmp"
+  private static final int TYPE_MSHP = 0x6d736870; // "mshp"
+  private static final int TYPE_RAW = 0x72617720; // "raw "
+  private static final int TYPE_DFL8 = 0x64666c38; // "dfl8"
+  private static final int TYPE_MESH = 0x6d657368; // "mesh"
+  private static final int TYPE_PROJ = 0x70726f6a; // "proj"
+
+  // Sanity limits to prevent a bad file from creating an OOM situation. We don't expect a mesh to
+  // exceed these limits.
+  private static final int MAX_COORDINATE_COUNT = 10000;
+  private static final int MAX_VERTEX_COUNT = 32 * 1000;
+  private static final int MAX_TRIANGLE_INDICES = 128 * 1000;
+
+  private ProjectionDecoder() {}
+
+  /**
+   * Decodes the projection data.
+   *
+   * @param projectionData The projection data.
+   * @param stereoMode A {@link C.StereoMode} value.
+   * @return The projection or null if the data can't be decoded.
+   */
+  public static @Nullable Projection decode(byte[] projectionData, @C.StereoMode int stereoMode) {
+    ParsableByteArray input = new ParsableByteArray(projectionData);
+    // MP4 containers include the proj box but webm containers do not.
+    // Both containers use mshp.
+    ArrayList<Mesh> meshes = null;
+    try {
+      meshes = isProj(input) ? parseProj(input) : parseMshp(input);
+    } catch (ArrayIndexOutOfBoundsException ignored) {
+      // Truncated/corrupt input; treated the same as undecodable data (null result).
+    }
+    if (meshes == null) {
+      return null;
+    } else {
+      switch (meshes.size()) {
+        case 1:
+          return new Projection(meshes.get(0), stereoMode);
+        case 2:
+          return new Projection(meshes.get(0), meshes.get(1), stereoMode);
+        case 0:
+        default:
+          // Zero or more than two meshes is unsupported.
+          return null;
+      }
+    }
+  }
+
+  /** Returns true if the input contains a proj box. Indicates MP4 container. */
+  private static boolean isProj(ParsableByteArray input) {
+    input.skipBytes(4); // size
+    int type = input.readInt();
+    input.setPosition(0); // Rewind so the caller-selected parser sees the full box.
+    return type == TYPE_PROJ;
+  }
+
+  /**
+   * Walks the children of a proj box looking for a mshp (or legacy ytmp) child and parses it.
+   * Returns null if no such child is found or a child's size is inconsistent.
+   */
+  private static @Nullable ArrayList<Mesh> parseProj(ParsableByteArray input) {
+    input.skipBytes(8); // size and type.
+    int position = input.getPosition();
+    int limit = input.limit();
+    while (position < limit) {
+      int childEnd = position + input.readInt();
+      // Guard against zero/negative sizes and children overrunning the parent box.
+      if (childEnd <= position || childEnd > limit) {
+        return null;
+      }
+      int childAtomType = input.readInt();
+      // Some early files named the atom ytmp rather than mshp.
+      if (childAtomType == TYPE_YTMP || childAtomType == TYPE_MSHP) {
+        input.setLimit(childEnd);
+        return parseMshp(input);
+      }
+      position = childEnd;
+      input.setPosition(position);
+    }
+    return null;
+  }
+
+  /**
+   * Parses a mshp box: version/flags/crc header, then an encoding four-cc. Deflate-compressed
+   * payloads ("dfl8") are inflated before parsing; only "raw " is accepted otherwise.
+   */
+  private static @Nullable ArrayList<Mesh> parseMshp(ParsableByteArray input) {
+    int version = input.readUnsignedByte();
+    if (version != 0) {
+      return null;
+    }
+    input.skipBytes(7); // flags + crc.
+    int encoding = input.readInt();
+    if (encoding == TYPE_DFL8) {
+      ParsableByteArray output = new ParsableByteArray();
+      // nowrap=true: the payload is raw deflate data without a zlib header.
+      Inflater inflater = new Inflater(true);
+      try {
+        if (!Util.inflate(input, output, inflater)) {
+          return null;
+        }
+      } finally {
+        inflater.end(); // Release native inflater resources even on failure.
+      }
+      input = output;
+    } else if (encoding != TYPE_RAW) {
+      return null;
+    }
+    return parseRawMshpData(input);
+  }
+
+  /** Parses MSHP data after the encoding_four_cc field. */
+  private static @Nullable ArrayList<Mesh> parseRawMshpData(ParsableByteArray input) {
+    ArrayList<Mesh> meshes = new ArrayList<>();
+    int position = input.getPosition();
+    int limit = input.limit();
+    while (position < limit) {
+      int childEnd = position + input.readInt();
+      // Guard against zero/negative sizes and children overrunning the data.
+      if (childEnd <= position || childEnd > limit) {
+        return null;
+      }
+      int childAtomType = input.readInt();
+      if (childAtomType == TYPE_MESH) {
+        Mesh mesh = parseMesh(input);
+        if (mesh == null) {
+          return null;
+        }
+        meshes.add(mesh);
+      }
+      position = childEnd;
+      input.setPosition(position);
+    }
+    return meshes;
+  }
+
+  /**
+   * Parses a single mesh box into a {@link Mesh}, or returns null if any count exceeds the sanity
+   * limits or any decoded index is out of range.
+   *
+   * <p>Layout: a float coordinate table, then delta-coded vertex records (5 coordinate indices
+   * each: x, y, z, u, v), then byte-aligned sub-mesh records with delta-coded triangle indices.
+   */
+  private static @Nullable Mesh parseMesh(ParsableByteArray input) {
+    // Read the coordinates.
+    int coordinateCount = input.readInt();
+    if (coordinateCount > MAX_COORDINATE_COUNT) {
+      return null;
+    }
+    float[] coordinates = new float[coordinateCount];
+    for (int coordinate = 0; coordinate < coordinateCount; coordinate++) {
+      coordinates[coordinate] = input.readFloat();
+    }
+    // Read the vertices.
+    int vertexCount = input.readInt();
+    if (vertexCount > MAX_VERTEX_COUNT) {
+      return null;
+    }
+
+    // Indices into the coordinate table are stored with ceil(log2(2 * count)) bits each.
+    final double log2 = Math.log(2.0);
+    int coordinateCountSizeBits = (int) Math.ceil(Math.log(2.0 * coordinateCount) / log2);
+
+    // Switch to bit-level reads, continuing from the current byte position.
+    ParsableBitArray bitInput = new ParsableBitArray(input.data);
+    bitInput.setPosition(input.getPosition() * 8);
+    float[] vertices = new float[vertexCount * 5];
+    // Running index per component; each stored value is a zigzag delta from the previous vertex.
+    int[] coordinateIndices = new int[5];
+    int vertexIndex = 0;
+    for (int vertex = 0; vertex < vertexCount; vertex++) {
+      for (int i = 0; i < 5; i++) {
+        int coordinateIndex =
+            coordinateIndices[i] + decodeZigZag(bitInput.readBits(coordinateCountSizeBits));
+        if (coordinateIndex >= coordinateCount || coordinateIndex < 0) {
+          return null;
+        }
+        vertices[vertexIndex++] = coordinates[coordinateIndex];
+        coordinateIndices[i] = coordinateIndex;
+      }
+    }
+
+    // Pad to next byte boundary
+    bitInput.setPosition(((bitInput.getPosition() + 7) & ~7));
+
+    int subMeshCount = bitInput.readBits(32);
+    SubMesh[] subMeshes = new SubMesh[subMeshCount];
+    for (int i = 0; i < subMeshCount; i++) {
+      int textureId = bitInput.readBits(8);
+      int drawMode = bitInput.readBits(8);
+      int triangleIndexCount = bitInput.readBits(32);
+      if (triangleIndexCount > MAX_TRIANGLE_INDICES) {
+        return null;
+      }
+      int vertexCountSizeBits = (int) Math.ceil(Math.log(2.0 * vertexCount) / log2);
+      int index = 0;
+      float[] triangleVertices = new float[triangleIndexCount * 3];
+      float[] textureCoords = new float[triangleIndexCount * 2];
+      for (int counter = 0; counter < triangleIndexCount; counter++) {
+        // Triangle indices are zigzag deltas from the previous index.
+        index += decodeZigZag(bitInput.readBits(vertexCountSizeBits));
+        if (index < 0 || index >= vertexCount) {
+          return null;
+        }
+        // Split each 5-float vertex record into position (x, y, z) and texture (u, v) arrays.
+        triangleVertices[counter * 3] = vertices[index * 5];
+        triangleVertices[counter * 3 + 1] = vertices[index * 5 + 1];
+        triangleVertices[counter * 3 + 2] = vertices[index * 5 + 2];
+        textureCoords[counter * 2] = vertices[index * 5 + 3];
+        textureCoords[counter * 2 + 1] = vertices[index * 5 + 4];
+      }
+      subMeshes[i] = new SubMesh(textureId, triangleVertices, textureCoords, drawMode);
+    }
+    return new Mesh(subMeshes);
+  }
+
+  /**
+   * Decodes Zigzag encoding as described in
+   * https://developers.google.com/protocol-buffers/docs/encoding#signed-integers
+   */
+  private static int decodeZigZag(int n) {
+    return (n >> 1) ^ -(n & 1);
+  }
+}
diff --git a/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/package-info.java b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/package-info.java
new file mode 100644
index 0000000000..7ab7fced0b
--- /dev/null
+++ b/mobile/android/exoplayer2/src/main/java/org/mozilla/thirdparty/com/google/android/exoplayer2/video/spherical/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@NonNullApi
+package org.mozilla.thirdparty.com.google.android.exoplayer2.video.spherical;
+
+import org.mozilla.thirdparty.com.google.android.exoplayer2.util.NonNullApi;