Diffstat (limited to 'dom/media/platforms/android')
-rw-r--r--  dom/media/platforms/android/AndroidDataEncoder.cpp    505
-rw-r--r--  dom/media/platforms/android/AndroidDataEncoder.h       121
-rw-r--r--  dom/media/platforms/android/AndroidDecoderModule.cpp   329
-rw-r--r--  dom/media/platforms/android/AndroidDecoderModule.h      75
-rw-r--r--  dom/media/platforms/android/AndroidEncoderModule.cpp    47
-rw-r--r--  dom/media/platforms/android/AndroidEncoderModule.h      29
-rw-r--r--  dom/media/platforms/android/JavaCallbacksSupport.h      73
-rw-r--r--  dom/media/platforms/android/RemoteDataDecoder.cpp     1184
-rw-r--r--  dom/media/platforms/android/RemoteDataDecoder.h        112
9 files changed, 2475 insertions, 0 deletions
diff --git a/dom/media/platforms/android/AndroidDataEncoder.cpp b/dom/media/platforms/android/AndroidDataEncoder.cpp
new file mode 100644
index 0000000000..6aa1f23614
--- /dev/null
+++ b/dom/media/platforms/android/AndroidDataEncoder.cpp
@@ -0,0 +1,505 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AndroidDataEncoder.h"
+
+#include "AnnexB.h"
+#include "H264.h"
+#include "MediaData.h"
+#include "MediaInfo.h"
+
+#include "ImageContainer.h"
+#include "libyuv/convert_from.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Unused.h"
+#include "nsThreadUtils.h"
+
+namespace mozilla {
+
+extern LazyLogModule sPEMLog;
+#define AND_ENC_LOG(arg, ...) \
+ MOZ_LOG(sPEMLog, mozilla::LogLevel::Debug, \
+ ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define AND_ENC_LOGE(arg, ...) \
+ MOZ_LOG(sPEMLog, mozilla::LogLevel::Error, \
+ ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
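+// Used at the top of ProcessEncode() and ProcessDrain() to surface errors that
+// the Java codec reported asynchronously via CallbacksSupport::HandleError().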
+#define REJECT_IF_ERROR() \
+ do { \
+ if (mError) { \
+ auto error = mError.value(); \
+ mError.reset(); \
+ return EncodePromise::CreateAndReject(std::move(error), __func__); \
+ } \
+ } while (0)
+
+RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::Init() {
+  // Sanity-check the input size, because the Android software encoder fails
+  // to do it.
+ if (mConfig.mSize.width == 0 || mConfig.mSize.height == 0) {
+ return InitPromise::CreateAndReject(NS_ERROR_ILLEGAL_VALUE, __func__);
+ }
+
+ return InvokeAsync(mTaskQueue, this, __func__,
+ &AndroidDataEncoder::ProcessInit);
+}
+
+static const char* MimeTypeOf(CodecType aCodec) {
+ switch (aCodec) {
+ case CodecType::H264:
+ return "video/avc";
+ case CodecType::VP8:
+ return "video/x-vnd.on2.vp8";
+ case CodecType::VP9:
+ return "video/x-vnd.on2.vp9";
+ default:
+ return "";
+ }
+}
+
+using FormatResult = Result<java::sdk::MediaFormat::LocalRef, MediaResult>;
+
+FormatResult ToMediaFormat(const EncoderConfig& aConfig) {
+ nsresult rv = NS_OK;
+ java::sdk::MediaFormat::LocalRef format;
+ rv = java::sdk::MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodec),
+ aConfig.mSize.width,
+ aConfig.mSize.height, &format);
+ NS_ENSURE_SUCCESS(
+ rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to create Java MediaFormat object")));
+
+ rv =
+ format->SetInteger(java::sdk::MediaFormat::KEY_BITRATE_MODE, 2 /* CBR */);
+ NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to set bitrate mode")));
+
+ rv = format->SetInteger(java::sdk::MediaFormat::KEY_BIT_RATE,
+ AssertedCast<int>(aConfig.mBitrate));
+ NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to set bitrate")));
+
+ // COLOR_FormatYUV420SemiPlanar(NV12) is the most widely supported
+ // format.
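+  // 0x15 == 21 == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar.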
+ rv = format->SetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT, 0x15);
+ NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to set color format")));
+
+ rv = format->SetInteger(java::sdk::MediaFormat::KEY_FRAME_RATE,
+ aConfig.mFramerate);
+ NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to set frame rate")));
+
+ // Ensure interval >= 1. A negative value means no key frames are
+ // requested after the first frame. A zero value means a stream
+ // containing all key frames is requested.
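+  // E.g. (illustrative numbers, not from this patch): mKeyframeInterval = 300
+  // frames at mFramerate = 30 fps requests an I-frame every 300 / 30 = 10 s.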
+ int32_t intervalInSec = AssertedCast<int32_t>(
+ std::max<size_t>(1, aConfig.mKeyframeInterval / aConfig.mFramerate));
+ rv = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
+ intervalInSec);
+ NS_ENSURE_SUCCESS(rv,
+ FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "fail to set I-frame interval")));
+
+ return format;
+}
+
+RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
+ AssertOnTaskQueue();
+ MOZ_ASSERT(!mJavaEncoder);
+
+ java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
+ if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
+ !bufferInfo) {
+ return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
+ }
+ mInputBufferInfo = bufferInfo;
+
+ FormatResult result = ToMediaFormat(mConfig);
+ if (result.isErr()) {
+ return InitPromise::CreateAndReject(result.unwrapErr(), __func__);
+ }
+ mFormat = result.unwrap();
+
+ // Register native methods.
+ JavaCallbacksSupport::Init();
+
+ mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
+ if (!mJavaCallbacks) {
+ return InitPromise::CreateAndReject(
+ MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "cannot create Java callback object"),
+ __func__);
+ }
+ JavaCallbacksSupport::AttachNative(
+ mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
+
+ mJavaEncoder = java::CodecProxy::Create(true /* encoder */, mFormat, nullptr,
+ mJavaCallbacks, u""_ns);
+ if (!mJavaEncoder) {
+ return InitPromise::CreateAndReject(
+ MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "cannot create Java encoder object"),
+ __func__);
+ }
+
+ mIsHardwareAccelerated = mJavaEncoder->IsHardwareAccelerated();
+ mDrainState = DrainState::DRAINABLE;
+
+ return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
+}
+
+RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Encode(
+ const MediaData* aSample) {
+ RefPtr<AndroidDataEncoder> self = this;
+ MOZ_ASSERT(aSample != nullptr);
+
+ RefPtr<const MediaData> sample(aSample);
+ return InvokeAsync(mTaskQueue, __func__,
+ [self, sample]() { return self->ProcessEncode(sample); });
+}
+
+static jni::ByteBuffer::LocalRef ConvertI420ToNV12Buffer(
+ RefPtr<const VideoData>& aSample, RefPtr<MediaByteBuffer>& aYUVBuffer,
+ int aStride, int aYPlaneHeight) {
+ const layers::PlanarYCbCrImage* image = aSample->mImage->AsPlanarYCbCrImage();
+ MOZ_ASSERT(image);
+ const layers::PlanarYCbCrData* yuv = image->GetData();
+ auto ySize = yuv->YDataSize();
+ auto cbcrSize = yuv->CbCrDataSize();
+ // If we have a stride or height passed in from the Codec we need to use
+ // those.
+ auto yStride = aStride != 0 ? aStride : yuv->mYStride;
+ auto height = aYPlaneHeight != 0 ? aYPlaneHeight : ySize.height;
+ size_t yLength = yStride * height;
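+  // NV12 needs a full Y plane followed by one interleaved CbCr plane whose
+  // last row only occupies cbcrSize.width * 2 bytes, hence the expression
+  // below.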
+ size_t length =
+ yLength + yStride * (cbcrSize.height - 1) + cbcrSize.width * 2;
+
+ if (!aYUVBuffer || aYUVBuffer->Capacity() < length) {
+ aYUVBuffer = MakeRefPtr<MediaByteBuffer>(length);
+ aYUVBuffer->SetLength(length);
+ } else {
+ MOZ_ASSERT(aYUVBuffer->Length() >= length);
+ }
+
+ if (libyuv::I420ToNV12(yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
+ yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
+ aYUVBuffer->Elements(), yStride,
+ aYUVBuffer->Elements() + yLength, yStride, ySize.width,
+ ySize.height) != 0) {
+ return nullptr;
+ }
+
+ return jni::ByteBuffer::New(aYUVBuffer->Elements(), aYUVBuffer->Length());
+}
+
+RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessEncode(
+ const RefPtr<const MediaData>& aSample) {
+ AssertOnTaskQueue();
+
+ REJECT_IF_ERROR();
+
+ RefPtr<const VideoData> sample(aSample->As<const VideoData>());
+ MOZ_ASSERT(sample);
+
+ // Bug 1789846: Check with the Encoder if MediaCodec has a stride or height
+ // value to use.
+ jni::ByteBuffer::LocalRef buffer = ConvertI420ToNV12Buffer(
+ sample, mYUVBuffer, mJavaEncoder->GetInputFormatStride(),
+ mJavaEncoder->GetInputFormatYPlaneHeight());
+ if (!buffer) {
+ return EncodePromise::CreateAndReject(NS_ERROR_ILLEGAL_INPUT, __func__);
+ }
+
+ if (aSample->mKeyframe) {
+ mInputBufferInfo->Set(0, AssertedCast<int32_t>(mYUVBuffer->Length()),
+ aSample->mTime.ToMicroseconds(),
+ java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME);
+ } else {
+ mInputBufferInfo->Set(0, AssertedCast<int32_t>(mYUVBuffer->Length()),
+ aSample->mTime.ToMicroseconds(), 0);
+ }
+
+ mJavaEncoder->Input(buffer, mInputBufferInfo, nullptr);
+
+ if (mEncodedData.Length() > 0) {
+ EncodedData pending = std::move(mEncodedData);
+ return EncodePromise::CreateAndResolve(std::move(pending), __func__);
+ }
+ return EncodePromise::CreateAndResolve(EncodedData(), __func__);
+}
+
+class AutoRelease final {
+ public:
+ AutoRelease(java::CodecProxy::Param aEncoder, java::Sample::Param aSample)
+ : mEncoder(aEncoder), mSample(aSample) {}
+
+ ~AutoRelease() { mEncoder->ReleaseOutput(mSample, false); }
+
+ private:
+ java::CodecProxy::GlobalRef mEncoder;
+ java::Sample::GlobalRef mSample;
+};
+
+static RefPtr<MediaByteBuffer> ExtractCodecConfig(
+ java::SampleBuffer::Param aBuffer, const int32_t aOffset,
+ const int32_t aSize, const bool aAsAnnexB) {
+ auto annexB = MakeRefPtr<MediaByteBuffer>(aSize);
+ annexB->SetLength(aSize);
+ jni::ByteBuffer::LocalRef dest =
+ jni::ByteBuffer::New(annexB->Elements(), aSize);
+ aBuffer->WriteToByteBuffer(dest, aOffset, aSize);
+ if (aAsAnnexB) {
+ return annexB;
+ }
+ // Convert to avcC.
+ nsTArray<AnnexB::NALEntry> paramSets;
+ AnnexB::ParseNALEntries(
+ Span<const uint8_t>(annexB->Elements(), annexB->Length()), paramSets);
+
+ auto avcc = MakeRefPtr<MediaByteBuffer>();
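+  // MediaCodec's AVC codec-config buffer normally carries the parameter sets
+  // in [SPS, PPS] order; that ordering is assumed below.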
+ AnnexB::NALEntry& sps = paramSets.ElementAt(0);
+ AnnexB::NALEntry& pps = paramSets.ElementAt(1);
+ const uint8_t* spsPtr = annexB->Elements() + sps.mOffset;
+ H264::WriteExtraData(
+ avcc, spsPtr[1], spsPtr[2], spsPtr[3],
+ Span<const uint8_t>(spsPtr, sps.mSize),
+ Span<const uint8_t>(annexB->Elements() + pps.mOffset, pps.mSize));
+ return avcc;
+}
+
+void AndroidDataEncoder::ProcessOutput(
+ java::Sample::GlobalRef&& aSample,
+ java::SampleBuffer::GlobalRef&& aBuffer) {
+ if (!mTaskQueue->IsCurrentThreadIn()) {
+ nsresult rv =
+ mTaskQueue->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
+ java::SampleBuffer::GlobalRef&&>(
+ "AndroidDataEncoder::ProcessOutput", this,
+ &AndroidDataEncoder::ProcessOutput, std::move(aSample),
+ std::move(aBuffer)));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+ AssertOnTaskQueue();
+
+ if (!mJavaEncoder) {
+ return;
+ }
+
+ AutoRelease releaseSample(mJavaEncoder, aSample);
+
+ java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
+ MOZ_ASSERT(info);
+
+ int32_t flags;
+ bool ok = NS_SUCCEEDED(info->Flags(&flags));
+ bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+
+ int32_t offset;
+ ok &= NS_SUCCEEDED(info->Offset(&offset));
+
+ int32_t size;
+ ok &= NS_SUCCEEDED(info->Size(&size));
+
+ int64_t presentationTimeUs;
+ ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
+
+ if (!ok) {
+ return;
+ }
+
+ if (size > 0) {
+ if ((flags & java::sdk::MediaCodec::BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ mConfigData = ExtractCodecConfig(aBuffer, offset, size,
+ mConfig.mUsage == Usage::Realtime);
+ return;
+ }
+ RefPtr<MediaRawData> output;
+ if (mConfig.mCodec == CodecType::H264) {
+ output = GetOutputDataH264(
+ aBuffer, offset, size,
+ !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
+ } else {
+ output = GetOutputData(
+ aBuffer, offset, size,
+ !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
+ }
+    // GetOutputData{,H264} return null on allocation/conversion failure (and
+    // log it); guard against dereferencing null and drop the frame instead.
+    if (output) {
+      output->mEOS = isEOS;
+      output->mTime = media::TimeUnit::FromMicroseconds(presentationTimeUs);
+      mEncodedData.AppendElement(std::move(output));
+    }
+ }
+
+ if (isEOS) {
+ mDrainState = DrainState::DRAINED;
+ }
+ if (!mDrainPromise.IsEmpty()) {
+ EncodedData pending = std::move(mEncodedData);
+ mDrainPromise.Resolve(std::move(pending), __func__);
+ }
+}
+
+RefPtr<MediaRawData> AndroidDataEncoder::GetOutputData(
+ java::SampleBuffer::Param aBuffer, const int32_t aOffset,
+ const int32_t aSize, const bool aIsKeyFrame) {
+ // Copy frame data from Java buffer.
+ auto output = MakeRefPtr<MediaRawData>();
+ UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
+ if (!writer->SetSize(aSize)) {
+ AND_ENC_LOGE("fail to allocate output buffer");
+ return nullptr;
+ }
+
+ jni::ByteBuffer::LocalRef buf = jni::ByteBuffer::New(writer->Data(), aSize);
+ aBuffer->WriteToByteBuffer(buf, aOffset, aSize);
+ output->mKeyframe = aIsKeyFrame;
+
+ return output;
+}
+
+// An AVC/H.264 frame can be in avcC or Annex B form and needs extra
+// conversion steps.
+RefPtr<MediaRawData> AndroidDataEncoder::GetOutputDataH264(
+ java::SampleBuffer::Param aBuffer, const int32_t aOffset,
+ const int32_t aSize, const bool aIsKeyFrame) {
+ auto output = MakeRefPtr<MediaRawData>();
+
+ size_t prependSize = 0;
+ RefPtr<MediaByteBuffer> avccHeader;
+ if (aIsKeyFrame && mConfigData) {
+ if (mConfig.mUsage == Usage::Realtime) {
+ prependSize = mConfigData->Length();
+ } else {
+ avccHeader = mConfigData;
+ }
+ }
+
+ UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
+ if (!writer->SetSize(prependSize + aSize)) {
+ AND_ENC_LOGE("fail to allocate output buffer");
+ return nullptr;
+ }
+
+ if (prependSize > 0) {
+ PodCopy(writer->Data(), mConfigData->Elements(), prependSize);
+ }
+
+ jni::ByteBuffer::LocalRef buf =
+ jni::ByteBuffer::New(writer->Data() + prependSize, aSize);
+ aBuffer->WriteToByteBuffer(buf, aOffset, aSize);
+
+ if (mConfig.mUsage != Usage::Realtime &&
+ !AnnexB::ConvertSampleToAVCC(output, avccHeader)) {
+ AND_ENC_LOGE("fail to convert annex-b sample to AVCC");
+ return nullptr;
+ }
+
+ output->mKeyframe = aIsKeyFrame;
+
+ return output;
+}
+
+RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Drain() {
+ return InvokeAsync(mTaskQueue, this, __func__,
+ &AndroidDataEncoder::ProcessDrain);
+}
+
+RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessDrain() {
+ AssertOnTaskQueue();
+ MOZ_ASSERT(mJavaEncoder);
+ MOZ_ASSERT(mDrainPromise.IsEmpty());
+
+ REJECT_IF_ERROR();
+
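+  // DRAINABLE: queue an empty EOS-flagged input buffer (size 0, timestamp -1)
+  // and enter DRAINING. DRAINING: if no output is buffered yet, park the
+  // promise; ProcessOutput() resolves it and flips the state to DRAINED once
+  // the EOS flag comes back. DRAINED: hand back whatever is buffered.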
+ switch (mDrainState) {
+ case DrainState::DRAINABLE:
+ mInputBufferInfo->Set(0, 0, -1,
+ java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+ mJavaEncoder->Input(nullptr, mInputBufferInfo, nullptr);
+ mDrainState = DrainState::DRAINING;
+ [[fallthrough]];
+ case DrainState::DRAINING:
+ if (mEncodedData.IsEmpty()) {
+ return mDrainPromise.Ensure(__func__); // Pending promise.
+ }
+ [[fallthrough]];
+ case DrainState::DRAINED:
+ if (mEncodedData.Length() > 0) {
+ EncodedData pending = std::move(mEncodedData);
+ return EncodePromise::CreateAndResolve(std::move(pending), __func__);
+ } else {
+ return EncodePromise::CreateAndResolve(EncodedData(), __func__);
+ }
+ }
+}
+
+RefPtr<ShutdownPromise> AndroidDataEncoder::Shutdown() {
+ return InvokeAsync(mTaskQueue, this, __func__,
+ &AndroidDataEncoder::ProcessShutdown);
+}
+
+RefPtr<ShutdownPromise> AndroidDataEncoder::ProcessShutdown() {
+ AssertOnTaskQueue();
+ if (mJavaEncoder) {
+ mJavaEncoder->Release();
+ mJavaEncoder = nullptr;
+ }
+
+ if (mJavaCallbacks) {
+ JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
+ JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
+ mJavaCallbacks = nullptr;
+ }
+
+ mFormat = nullptr;
+
+ return ShutdownPromise::CreateAndResolve(true, __func__);
+}
+
+RefPtr<GenericPromise> AndroidDataEncoder::SetBitrate(uint32_t aBitsPerSec) {
+ RefPtr<AndroidDataEncoder> self(this);
+ return InvokeAsync(mTaskQueue, __func__, [self, aBitsPerSec]() {
+ self->mJavaEncoder->SetBitrate(AssertedCast<int>(aBitsPerSec));
+ return GenericPromise::CreateAndResolve(true, __func__);
+ });
+}
+
+void AndroidDataEncoder::Error(const MediaResult& aError) {
+ if (!mTaskQueue->IsCurrentThreadIn()) {
+ nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
+ "AndroidDataEncoder::Error", this, &AndroidDataEncoder::Error, aError));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+ AssertOnTaskQueue();
+
+ mError = Some(aError);
+}
+
+void AndroidDataEncoder::CallbacksSupport::HandleInput(int64_t aTimestamp,
+ bool aProcessed) {}
+
+void AndroidDataEncoder::CallbacksSupport::HandleOutput(
+ java::Sample::Param aSample, java::SampleBuffer::Param aBuffer) {
+ MutexAutoLock lock(mMutex);
+ if (mEncoder) {
+ mEncoder->ProcessOutput(aSample, aBuffer);
+ }
+}
+
+void AndroidDataEncoder::CallbacksSupport::HandleOutputFormatChanged(
+ java::sdk::MediaFormat::Param aFormat) {}
+
+void AndroidDataEncoder::CallbacksSupport::HandleError(
+ const MediaResult& aError) {
+ MutexAutoLock lock(mMutex);
+ if (mEncoder) {
+ mEncoder->Error(aError);
+ }
+}
+
+} // namespace mozilla
+
+#undef AND_ENC_LOG
+#undef AND_ENC_LOGE
diff --git a/dom/media/platforms/android/AndroidDataEncoder.h b/dom/media/platforms/android/AndroidDataEncoder.h
new file mode 100644
index 0000000000..ed1dfe7708
--- /dev/null
+++ b/dom/media/platforms/android/AndroidDataEncoder.h
@@ -0,0 +1,121 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORMS_ANDROID_ANDROIDDATAENCODER_H_
+#define DOM_MEDIA_PLATFORMS_ANDROID_ANDROIDDATAENCODER_H_
+
+#include "MediaData.h"
+#include "PlatformEncoderModule.h"
+
+#include "JavaCallbacksSupport.h"
+
+#include "mozilla/Maybe.h"
+#include "mozilla/Mutex.h"
+
+namespace mozilla {
+
+class AndroidDataEncoder final : public MediaDataEncoder {
+ public:
+ AndroidDataEncoder(const EncoderConfig& aConfig,
+ const RefPtr<TaskQueue>& aTaskQueue)
+ : mConfig(aConfig), mTaskQueue(aTaskQueue) {
+ MOZ_ASSERT(mConfig.mSize.width > 0 && mConfig.mSize.height > 0);
+ MOZ_ASSERT(mTaskQueue);
+ }
+ ~AndroidDataEncoder() { MOZ_ASSERT(!mJavaEncoder); }
+
+ RefPtr<InitPromise> Init() override;
+ RefPtr<EncodePromise> Encode(const MediaData* aSample) override;
+ RefPtr<EncodePromise> Drain() override;
+ RefPtr<ShutdownPromise> Shutdown() override;
+ RefPtr<GenericPromise> SetBitrate(uint32_t aBitsPerSec) override;
+ RefPtr<ReconfigurationPromise> Reconfigure(
+ const RefPtr<const EncoderConfigurationChangeList>& aConfigurationChanges)
+ override {
+ // General reconfiguration interface not implemented right now
+ return MediaDataEncoder::ReconfigurationPromise::CreateAndReject(
+ NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
+ };
+
+ nsCString GetDescriptionName() const override { return "Android Encoder"_ns; }
+
+ private:
+ class CallbacksSupport final : public JavaCallbacksSupport {
+ public:
+ explicit CallbacksSupport(AndroidDataEncoder* aEncoder)
+ : mMutex("AndroidDataEncoder::CallbacksSupport") {
+ MutexAutoLock lock(mMutex);
+ mEncoder = aEncoder;
+ }
+
+ ~CallbacksSupport() {
+ MutexAutoLock lock(mMutex);
+ mEncoder = nullptr;
+ }
+
+ void HandleInput(int64_t aTimestamp, bool aProcessed) override;
+ void HandleOutput(java::Sample::Param aSample,
+ java::SampleBuffer::Param aBuffer) override;
+ void HandleOutputFormatChanged(
+ java::sdk::MediaFormat::Param aFormat) override;
+ void HandleError(const MediaResult& aError) override;
+
+ private:
+ Mutex mMutex;
+ AndroidDataEncoder* mEncoder MOZ_GUARDED_BY(mMutex);
+ };
+ friend class CallbacksSupport;
+
+ // Methods only called on mTaskQueue.
+ RefPtr<InitPromise> ProcessInit();
+ RefPtr<EncodePromise> ProcessEncode(const RefPtr<const MediaData>& aSample);
+ RefPtr<EncodePromise> ProcessDrain();
+ RefPtr<ShutdownPromise> ProcessShutdown();
+ void ProcessInput();
+ void ProcessOutput(java::Sample::GlobalRef&& aSample,
+ java::SampleBuffer::GlobalRef&& aBuffer);
+ RefPtr<MediaRawData> GetOutputData(java::SampleBuffer::Param aBuffer,
+ const int32_t aOffset, const int32_t aSize,
+ const bool aIsKeyFrame);
+ RefPtr<MediaRawData> GetOutputDataH264(java::SampleBuffer::Param aBuffer,
+ const int32_t aOffset,
+ const int32_t aSize,
+ const bool aIsKeyFrame);
+ void Error(const MediaResult& aError);
+
+ void AssertOnTaskQueue() const {
+ MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+ }
+
+ EncoderConfig mConfig;
+
+ RefPtr<TaskQueue> mTaskQueue;
+
+  // Can be accessed on any thread, but only written to during init.
+ bool mIsHardwareAccelerated = false;
+
+ java::CodecProxy::GlobalRef mJavaEncoder;
+ java::CodecProxy::NativeCallbacks::GlobalRef mJavaCallbacks;
+ java::sdk::MediaFormat::GlobalRef mFormat;
+  // Preallocated Java object used as reusable storage for input buffer
+  // information. Contents must be changed only on mTaskQueue.
+ java::sdk::MediaCodec::BufferInfo::GlobalRef mInputBufferInfo;
+
+ MozPromiseHolder<EncodePromise> mDrainPromise;
+
+  // Accessed on mTaskQueue only.
+ RefPtr<MediaByteBuffer> mYUVBuffer;
+ EncodedData mEncodedData;
+ // SPS/PPS NALUs for realtime usage, avcC otherwise.
+ RefPtr<MediaByteBuffer> mConfigData;
+
+ enum class DrainState { DRAINABLE, DRAINING, DRAINED };
+ DrainState mDrainState = DrainState::DRAINABLE;
+
+ Maybe<MediaResult> mError;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/android/AndroidDecoderModule.cpp b/dom/media/platforms/android/AndroidDecoderModule.cpp
new file mode 100644
index 0000000000..fff8669a74
--- /dev/null
+++ b/dom/media/platforms/android/AndroidDecoderModule.cpp
@@ -0,0 +1,329 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <jni.h>
+
+#ifdef MOZ_AV1
+# include "AOMDecoder.h"
+#endif
+#include "MediaInfo.h"
+#include "RemoteDataDecoder.h"
+#include "TheoraDecoder.h"
+#include "VPXDecoder.h"
+#include "mozilla/ClearOnShutdown.h"
+#include "mozilla/Components.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/gfx/gfxVars.h"
+#include "mozilla/java/HardwareCodecCapabilityUtilsWrappers.h"
+#include "nsIGfxInfo.h"
+#include "nsPromiseFlatString.h"
+#include "prlog.h"
+
+#undef LOG
+#define LOG(arg, ...) \
+ MOZ_LOG( \
+ sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
+ ("AndroidDecoderModule(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define SLOG(arg, ...) \
+ MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
+ ("%s: " arg, __func__, ##__VA_ARGS__))
+
+using namespace mozilla;
+using media::DecodeSupport;
+using media::DecodeSupportSet;
+using media::MCSInfo;
+using media::MediaCodec;
+using media::MediaCodecsSupport;
+using media::MediaCodecsSupported;
+using media::TimeUnit;
+
+namespace mozilla {
+
+mozilla::LazyLogModule sAndroidDecoderModuleLog("AndroidDecoderModule");
+
+nsCString TranslateMimeType(const nsACString& aMimeType) {
+ if (VPXDecoder::IsVPX(aMimeType, VPXDecoder::VP8)) {
+ static constexpr auto vp8 = "video/x-vnd.on2.vp8"_ns;
+ return vp8;
+ }
+ if (VPXDecoder::IsVPX(aMimeType, VPXDecoder::VP9)) {
+ static constexpr auto vp9 = "video/x-vnd.on2.vp9"_ns;
+ return vp9;
+ }
+ if (aMimeType.EqualsLiteral("video/av1")) {
+ static constexpr auto av1 = "video/av01"_ns;
+ return av1;
+ }
+ return nsCString(aMimeType);
+}
+
+AndroidDecoderModule::AndroidDecoderModule(CDMProxy* aProxy) {
+ mProxy = static_cast<MediaDrmCDMProxy*>(aProxy);
+}
+
+StaticAutoPtr<nsTArray<nsCString>> AndroidDecoderModule::sSupportedSwMimeTypes;
+StaticAutoPtr<nsTArray<nsCString>> AndroidDecoderModule::sSupportedHwMimeTypes;
+StaticAutoPtr<MediaCodecsSupported> AndroidDecoderModule::sSupportedCodecs;
+
+/* static */
+media::MediaCodecsSupported AndroidDecoderModule::GetSupportedCodecs() {
+ if (!sSupportedSwMimeTypes || !sSupportedHwMimeTypes || !sSupportedCodecs) {
+ SetSupportedMimeTypes();
+ }
+ return *sSupportedCodecs;
+}
+
+DecodeSupportSet AndroidDecoderModule::SupportsMimeType(
+ const nsACString& aMimeType) {
+ if (!sSupportedSwMimeTypes) {
+ SetSupportedMimeTypes();
+ }
+
+ // Handle per-codec logic if the codec type can be determined from
+ // the MIME type string. GetMediaCodecFromMimeType should handle every
+ // type string that was hardcoded in this function previously.
+ MediaCodec codec = MCSInfo::GetMediaCodecFromMimeType(aMimeType);
+ switch (codec) {
+ case MediaCodec::VP8:
+ if (!gfx::gfxVars::UseVP8HwDecode()) {
+ return media::DecodeSupportSet{};
+ }
+ break;
+
+ case MediaCodec::VP9:
+ if (!gfx::gfxVars::UseVP9HwDecode()) {
+ return media::DecodeSupportSet{};
+ }
+ break;
+
+    // Prefer the Gecko decoder for Theora/Opus/Vorbis; stagefright crashes
+    // on content demuxed from MP4.
+    // Not all Android devices support FLAC/Theora even when they say they do.
+ case MediaCodec::Theora:
+ SLOG("Rejecting video of type %s", aMimeType.Data());
+ return media::DecodeSupportSet{};
+ // Always use our own software decoder (in ffvpx) for audio except for AAC
+ case MediaCodec::MP3:
+ [[fallthrough]];
+ case MediaCodec::Opus:
+ [[fallthrough]];
+ case MediaCodec::Vorbis:
+ [[fallthrough]];
+ case MediaCodec::Wave:
+ [[fallthrough]];
+ case MediaCodec::FLAC:
+ SLOG("Rejecting audio of type %s", aMimeType.Data());
+ return media::DecodeSupportSet{};
+
+ // H264 always reports software decode
+ case MediaCodec::H264:
+ return DecodeSupport::SoftwareDecode;
+
+ // AV1 doesn't need any special handling.
+ case MediaCodec::AV1:
+ break;
+
+ case MediaCodec::SENTINEL:
+ [[fallthrough]];
+ default:
+ SLOG("Support check using default logic for %s", aMimeType.Data());
+ break;
+ }
+
+ // If a codec has no special handling or can't be determined from the
+ // MIME type string, check if the MIME type string itself is supported.
+ if (sSupportedHwMimeTypes &&
+ sSupportedHwMimeTypes->Contains(TranslateMimeType(aMimeType))) {
+ return DecodeSupport::HardwareDecode;
+ }
+ if (sSupportedSwMimeTypes &&
+ sSupportedSwMimeTypes->Contains(TranslateMimeType(aMimeType))) {
+ return DecodeSupport::SoftwareDecode;
+ }
+ return media::DecodeSupportSet{};
+}
+
+nsTArray<nsCString> AndroidDecoderModule::GetSupportedMimeTypes() {
+ mozilla::jni::ObjectArray::LocalRef supportedTypes = mozilla::java::
+ HardwareCodecCapabilityUtils::GetDecoderSupportedMimeTypes();
+
+  nsTArray<nsCString> st;
+ for (size_t i = 0; i < supportedTypes->Length(); i++) {
+ st.AppendElement(
+ jni::String::LocalRef(supportedTypes->GetElement(i))->ToCString());
+ }
+
+ return st;
+}
+
+nsTArray<nsCString> AndroidDecoderModule::GetSupportedMimeTypesPrefixed() {
+ mozilla::jni::ObjectArray::LocalRef supportedTypes = mozilla::java::
+ HardwareCodecCapabilityUtils::GetDecoderSupportedMimeTypesWithAccelInfo();
+
+  nsTArray<nsCString> st;
+ for (size_t i = 0; i < supportedTypes->Length(); i++) {
+ st.AppendElement(
+ jni::String::LocalRef(supportedTypes->GetElement(i))->ToCString());
+ }
+
+ return st;
+}
+
+void AndroidDecoderModule::SetSupportedMimeTypes() {
+ SetSupportedMimeTypes(GetSupportedMimeTypesPrefixed());
+}
+
+// Inbound MIME types prefixed with SW/HW need to be processed
+void AndroidDecoderModule::SetSupportedMimeTypes(
+ nsTArray<nsCString>&& aSupportedTypes) {
+ // Return if support is already cached
+ if (sSupportedSwMimeTypes && sSupportedHwMimeTypes && sSupportedCodecs) {
+ return;
+ }
+ if (!sSupportedSwMimeTypes) {
+ sSupportedSwMimeTypes = new nsTArray<nsCString>;
+ ClearOnShutdown(&sSupportedSwMimeTypes);
+ }
+ if (!sSupportedHwMimeTypes) {
+ sSupportedHwMimeTypes = new nsTArray<nsCString>;
+ ClearOnShutdown(&sSupportedHwMimeTypes);
+ }
+ if (!sSupportedCodecs) {
+ sSupportedCodecs = new MediaCodecsSupported();
+ ClearOnShutdown(&sSupportedCodecs);
+ }
+
+ DecodeSupportSet support;
+ // Process each MIME type string
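+  // Each entry is expected to look like "SW video/avc" or
+  // "HW video/x-vnd.on2.vp9": a two-character SW/HW prefix, one separator
+  // character, then the MIME type (hence the Substring offsets below).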
+ for (const auto& s : aSupportedTypes) {
+ // Verify MIME type string present
+ if (s.Length() < 4) {
+ SLOG("No SW/HW support prefix found in codec string %s", s.Data());
+ continue;
+ }
+ const auto mimeType = Substring(s, 3);
+ if (mimeType.Length() == 0) {
+ SLOG("No MIME type information found in codec string %s", s.Data());
+ continue;
+ }
+
+ // Extract SW/HW support prefix
+ const auto caps = Substring(s, 0, 2);
+ if (caps == "SW"_ns) {
+ sSupportedSwMimeTypes->AppendElement(mimeType);
+ support += DecodeSupport::SoftwareDecode;
+ } else if (caps == "HW"_ns) {
+ sSupportedHwMimeTypes->AppendElement(mimeType);
+ support += DecodeSupport::HardwareDecode;
+ } else {
+ SLOG("Error parsing acceleration info from JNI codec string %s",
+ s.Data());
+ continue;
+ }
+ const MediaCodec codec = MCSInfo::GetMediaCodecFromMimeType(mimeType);
+ if (codec == MediaCodec::SENTINEL) {
+ SLOG("Did not parse string %s to specific codec", s.Data());
+ continue;
+ }
+ *sSupportedCodecs += MCSInfo::GetMediaCodecsSupportEnum(codec, support);
+ }
+}
+
+DecodeSupportSet AndroidDecoderModule::SupportsMimeType(
+ const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
+ return AndroidDecoderModule::SupportsMimeType(aMimeType);
+}
+
+bool AndroidDecoderModule::SupportsColorDepth(
+ gfx::ColorDepth aColorDepth, DecoderDoctorDiagnostics* aDiagnostics) const {
+ // 10-bit support is codec dependent so this is not entirely accurate.
+ // Supports() will correct it.
+ return aColorDepth == gfx::ColorDepth::COLOR_8 ||
+ aColorDepth == gfx::ColorDepth::COLOR_10;
+}
+
+// A further check is needed because the base class uses the inaccurate
+// SupportsColorDepth().
+media::DecodeSupportSet AndroidDecoderModule::Supports(
+ const SupportDecoderParams& aParams,
+ DecoderDoctorDiagnostics* aDiagnostics) const {
+ media::DecodeSupportSet support =
+ PlatformDecoderModule::Supports(aParams, aDiagnostics);
+
+ // Short-circuit.
+ if (support.isEmpty()) {
+ return support;
+ }
+
+#ifdef MOZ_AV1
+ // For AV1, only allow HW decoder.
+ if (AOMDecoder::IsAV1(aParams.MimeType()) &&
+ (!StaticPrefs::media_av1_enabled() ||
+ !support.contains(media::DecodeSupport::HardwareDecode))) {
+ return media::DecodeSupportSet{};
+ }
+#endif
+
+ // Check 10-bit video.
+ const TrackInfo& trackInfo = aParams.mConfig;
+ const VideoInfo* videoInfo = trackInfo.GetAsVideoInfo();
+ if (!videoInfo || videoInfo->mColorDepth != gfx::ColorDepth::COLOR_10) {
+ return support;
+ }
+
+ return java::HardwareCodecCapabilityUtils::Decodes10Bit(
+ TranslateMimeType(aParams.MimeType()))
+ ? support
+ : media::DecodeSupportSet{};
+}
+
+already_AddRefed<MediaDataDecoder> AndroidDecoderModule::CreateVideoDecoder(
+ const CreateDecoderParams& aParams) {
+  // Temporary: forces use of VPXDecoder when alpha is present.
+  // Bug 1263836 will handle the alpha scenario once implemented. It will
+  // shift the check for alpha to PDMFactory but will not itself remove the
+  // need for a check.
+ if (aParams.VideoConfig().HasAlpha()) {
+ return nullptr;
+ }
+
+ nsString drmStubId;
+ if (mProxy) {
+ drmStubId = mProxy->GetMediaDrmStubId();
+ }
+
+ RefPtr<MediaDataDecoder> decoder =
+ RemoteDataDecoder::CreateVideoDecoder(aParams, drmStubId, mProxy);
+ return decoder.forget();
+}
+
+already_AddRefed<MediaDataDecoder> AndroidDecoderModule::CreateAudioDecoder(
+ const CreateDecoderParams& aParams) {
+ const AudioInfo& config = aParams.AudioConfig();
+ if (config.mBitDepth != 16) {
+ // We only handle 16-bit audio.
+ return nullptr;
+ }
+
+ LOG("CreateAudioFormat with mimeType=%s, mRate=%d, channels=%d",
+ config.mMimeType.Data(), config.mRate, config.mChannels);
+
+ nsString drmStubId;
+ if (mProxy) {
+ drmStubId = mProxy->GetMediaDrmStubId();
+ }
+ RefPtr<MediaDataDecoder> decoder =
+ RemoteDataDecoder::CreateAudioDecoder(aParams, drmStubId, mProxy);
+ return decoder.forget();
+}
+
+/* static */
+already_AddRefed<PlatformDecoderModule> AndroidDecoderModule::Create(
+ CDMProxy* aProxy) {
+ return MakeAndAddRef<AndroidDecoderModule>(aProxy);
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/android/AndroidDecoderModule.h b/dom/media/platforms/android/AndroidDecoderModule.h
new file mode 100644
index 0000000000..37a0f08588
--- /dev/null
+++ b/dom/media/platforms/android/AndroidDecoderModule.h
@@ -0,0 +1,75 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef AndroidDecoderModule_h_
+#define AndroidDecoderModule_h_
+
+#include "MediaCodecsSupport.h"
+#include "PlatformDecoderModule.h"
+#include "mozilla/MediaDrmCDMProxy.h"
+#include "mozilla/StaticPtr.h" // for StaticAutoPtr
+
+namespace mozilla {
+
+class AndroidDecoderModule : public PlatformDecoderModule {
+ template <typename T, typename... Args>
+ friend already_AddRefed<T> MakeAndAddRef(Args&&...);
+
+ public:
+ static already_AddRefed<PlatformDecoderModule> Create(
+ CDMProxy* aProxy = nullptr);
+
+ already_AddRefed<MediaDataDecoder> CreateVideoDecoder(
+ const CreateDecoderParams& aParams) override;
+
+ already_AddRefed<MediaDataDecoder> CreateAudioDecoder(
+ const CreateDecoderParams& aParams) override;
+
+ media::DecodeSupportSet SupportsMimeType(
+ const nsACString& aMimeType,
+ DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+ static media::DecodeSupportSet SupportsMimeType(const nsACString& aMimeType);
+
+ static nsTArray<nsCString> GetSupportedMimeTypes();
+  // Like GetSupportedMimeTypes, but adds a SW/HW prefix to each entry to
+  // indicate acceleration support.
+ static nsTArray<nsCString> GetSupportedMimeTypesPrefixed();
+
+ static void SetSupportedMimeTypes();
+ static void SetSupportedMimeTypes(nsTArray<nsCString>&& aSupportedTypes);
+
+ media::DecodeSupportSet Supports(
+ const SupportDecoderParams& aParams,
+ DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+ // Return supported codecs (querying via JNI if not already cached)
+ static media::MediaCodecsSupported GetSupportedCodecs();
+
+ protected:
+ bool SupportsColorDepth(
+ gfx::ColorDepth aColorDepth,
+ DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+ private:
+ explicit AndroidDecoderModule(CDMProxy* aProxy = nullptr);
+ virtual ~AndroidDecoderModule() = default;
+ RefPtr<MediaDrmCDMProxy> mProxy;
+ // SW compatible MIME type strings
+ static StaticAutoPtr<nsTArray<nsCString>> sSupportedSwMimeTypes;
+ // HW compatible MIME type strings
+ static StaticAutoPtr<nsTArray<nsCString>> sSupportedHwMimeTypes;
+ // EnumSet containing SW/HW codec support information parsed from
+ // MIME type strings. If a specific codec could not be determined
+ // it will not be included in this EnumSet. All supported MIME type strings
+ // are still stored in sSupportedSwMimeTypes and sSupportedHwMimeTypes.
+ static StaticAutoPtr<media::MediaCodecsSupported> sSupportedCodecs;
+};
+
+extern LazyLogModule sAndroidDecoderModuleLog;
+
+nsCString TranslateMimeType(const nsACString& aMimeType);
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/android/AndroidEncoderModule.cpp b/dom/media/platforms/android/AndroidEncoderModule.cpp
new file mode 100644
index 0000000000..15b23330e2
--- /dev/null
+++ b/dom/media/platforms/android/AndroidEncoderModule.cpp
@@ -0,0 +1,47 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AndroidEncoderModule.h"
+
+#include "AndroidDataEncoder.h"
+
+#include "mozilla/Logging.h"
+#include "mozilla/java/HardwareCodecCapabilityUtilsWrappers.h"
+
+namespace mozilla {
+extern LazyLogModule sPEMLog;
+#define AND_PEM_LOG(arg, ...) \
+ MOZ_LOG( \
+ sPEMLog, mozilla::LogLevel::Debug, \
+ ("AndroidEncoderModule(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+bool AndroidEncoderModule::SupportsCodec(CodecType aCodec) const {
+ return (aCodec == CodecType::H264 &&
+ java::HardwareCodecCapabilityUtils::HasHWH264(true /* encoder */)) ||
+ (aCodec == CodecType::VP8 &&
+ java::HardwareCodecCapabilityUtils::HasHWVP8(true /* encoder */)) ||
+ (aCodec == CodecType::VP9 &&
+ java::HardwareCodecCapabilityUtils::HasHWVP9(true /* encoder */));
+}
+
+bool AndroidEncoderModule::Supports(const EncoderConfig& aConfig) const {
+ if (!CanLikelyEncode(aConfig)) {
+ return false;
+ }
+ return SupportsCodec(aConfig.mCodec);
+}
+
+already_AddRefed<MediaDataEncoder> AndroidEncoderModule::CreateVideoEncoder(
+ const EncoderConfig& aConfig, const RefPtr<TaskQueue>& aTaskQueue) const {
+ if (!Supports(aConfig)) {
+ AND_PEM_LOG("Unsupported codec type: %s",
+ GetCodecTypeString(aConfig.mCodec));
+ return nullptr;
+ }
+ return MakeRefPtr<AndroidDataEncoder>(aConfig, aTaskQueue).forget();
+}
+
+} // namespace mozilla
+
+#undef AND_PEM_LOG
diff --git a/dom/media/platforms/android/AndroidEncoderModule.h b/dom/media/platforms/android/AndroidEncoderModule.h
new file mode 100644
index 0000000000..846a299fe5
--- /dev/null
+++ b/dom/media/platforms/android/AndroidEncoderModule.h
@@ -0,0 +1,29 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORMS_ANDROID_ANDROIDENCODERMODULE_H_
+#define DOM_MEDIA_PLATFORMS_ANDROID_ANDROIDENCODERMODULE_H_
+
+#include "PlatformEncoderModule.h"
+
+namespace mozilla {
+
+class AndroidEncoderModule final : public PlatformEncoderModule {
+ public:
+ AndroidEncoderModule() = default;
+ virtual ~AndroidEncoderModule() = default;
+  // Query support either for a full encoder config or for a codec type alone.
+ bool Supports(const EncoderConfig& aConfig) const override;
+ bool SupportsCodec(CodecType aCodec) const override;
+
+ const char* GetName() const override { return "Android Encoder Module"; }
+
+ already_AddRefed<MediaDataEncoder> CreateVideoEncoder(
+ const EncoderConfig& aConfig,
+ const RefPtr<TaskQueue>& aTaskQueue) const override;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/android/JavaCallbacksSupport.h b/dom/media/platforms/android/JavaCallbacksSupport.h
new file mode 100644
index 0000000000..e79d796209
--- /dev/null
+++ b/dom/media/platforms/android/JavaCallbacksSupport.h
@@ -0,0 +1,73 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef JavaCallbacksSupport_h_
+#define JavaCallbacksSupport_h_
+
+#include "MediaResult.h"
+#include "MediaCodec.h"
+#include "mozilla/java/CodecProxyNatives.h"
+#include "mozilla/java/SampleBufferWrappers.h"
+#include "mozilla/java/SampleWrappers.h"
+
+namespace mozilla {
+
+class JavaCallbacksSupport
+ : public java::CodecProxy::NativeCallbacks::Natives<JavaCallbacksSupport> {
+ public:
+ typedef java::CodecProxy::NativeCallbacks::Natives<JavaCallbacksSupport> Base;
+ using Base::AttachNative;
+ using Base::DisposeNative;
+ using Base::GetNative;
+
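+  // Lifecycle (as used by the encoder/decoder in this directory): call
+  // JavaCallbacksSupport::Init() once to register natives, AttachNative() a
+  // subclass instance after construction, then Cancel() followed by
+  // DisposeNative() during shutdown.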
+ JavaCallbacksSupport() : mCanceled(false) {}
+
+ virtual ~JavaCallbacksSupport() {}
+
+ virtual void HandleInput(int64_t aTimestamp, bool aProcessed) = 0;
+
+ void OnInputStatus(jlong aTimestamp, bool aProcessed) {
+ if (!mCanceled) {
+ HandleInput(aTimestamp, aProcessed);
+ }
+ }
+
+ virtual void HandleOutput(java::Sample::Param aSample,
+ java::SampleBuffer::Param aBuffer) = 0;
+
+ void OnOutput(jni::Object::Param aSample, jni::Object::Param aBuffer) {
+ if (!mCanceled) {
+ HandleOutput(java::Sample::Ref::From(aSample),
+ java::SampleBuffer::Ref::From(aBuffer));
+ }
+ }
+
+  virtual void HandleOutputFormatChanged(
+      java::sdk::MediaFormat::Param aFormat) {}
+
+ void OnOutputFormatChanged(jni::Object::Param aFormat) {
+ if (!mCanceled) {
+ HandleOutputFormatChanged(java::sdk::MediaFormat::Ref::From(aFormat));
+ }
+ }
+
+ virtual void HandleError(const MediaResult& aError) = 0;
+
+ void OnError(bool aIsFatal) {
+ if (!mCanceled) {
+ HandleError(aIsFatal
+ ? MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__)
+ : MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, __func__));
+ }
+ }
+
+ void Cancel() { mCanceled = true; }
+
+ private:
+ Atomic<bool> mCanceled;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/android/RemoteDataDecoder.cpp b/dom/media/platforms/android/RemoteDataDecoder.cpp
new file mode 100644
index 0000000000..f0fbc7a77c
--- /dev/null
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -0,0 +1,1184 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "RemoteDataDecoder.h"
+
+#include <jni.h>
+
+#include "AndroidBridge.h"
+#include "AndroidBuild.h"
+#include "AndroidDecoderModule.h"
+#include "EMEDecoderModule.h"
+#include "GLImages.h"
+#include "JavaCallbacksSupport.h"
+#include "MediaCodec.h"
+#include "MediaData.h"
+#include "MediaInfo.h"
+#include "PerformanceRecorder.h"
+#include "SimpleMap.h"
+#include "VPXDecoder.h"
+#include "VideoUtils.h"
+#include "mozilla/fallible.h"
+#include "mozilla/gfx/Matrix.h"
+#include "mozilla/gfx/Types.h"
+#include "mozilla/java/CodecProxyWrappers.h"
+#include "mozilla/java/GeckoSurfaceWrappers.h"
+#include "mozilla/java/SampleBufferWrappers.h"
+#include "mozilla/java/SampleWrappers.h"
+#include "mozilla/java/SurfaceAllocatorWrappers.h"
+#include "mozilla/Maybe.h"
+#include "nsPromiseFlatString.h"
+#include "nsThreadUtils.h"
+#include "prlog.h"
+
+#undef LOG
+#define LOG(arg, ...) \
+ MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
+ ("RemoteDataDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+using namespace mozilla;
+using namespace mozilla::gl;
+using media::TimeUnit;
+
+namespace mozilla {
+
+// Hold a reference to the output buffer until we're ready to release it back to
+// the Java codec (for rendering or not).
+class RenderOrReleaseOutput {
+ public:
+ RenderOrReleaseOutput(java::CodecProxy::Param aCodec,
+ java::Sample::Param aSample)
+ : mCodec(aCodec), mSample(aSample) {}
+
+ virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }
+
+ protected:
+ void ReleaseOutput(bool aToRender) {
+ if (mCodec && mSample) {
+ mCodec->ReleaseOutput(mSample, aToRender);
+ mCodec = nullptr;
+ mSample = nullptr;
+ }
+ }
+
+ private:
+ java::CodecProxy::GlobalRef mCodec;
+ java::Sample::GlobalRef mSample;
+};
+
+static bool areSmpte432ColorPrimariesBuggy() {
+ if (jni::GetAPIVersion() >= 34) {
+ const auto socManufacturer =
+ java::sdk::Build::SOC_MANUFACTURER()->ToString();
+ if (socManufacturer.EqualsASCII("Google")) {
+ return true;
+ }
+ }
+ return false;
+}
+
+class RemoteVideoDecoder final : public RemoteDataDecoder {
+ public:
+ // Render the output to the surface when the frame is sent
+ // to compositor, or release it if not presented.
+ class CompositeListener
+ : private RenderOrReleaseOutput,
+ public layers::SurfaceTextureImage::SetCurrentCallback {
+ public:
+ CompositeListener(java::CodecProxy::Param aCodec,
+ java::Sample::Param aSample)
+ : RenderOrReleaseOutput(aCodec, aSample) {}
+
+ void operator()(void) override { ReleaseOutput(true); }
+ };
+
+ class InputInfo {
+ public:
+ InputInfo() = default;
+
+ InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
+ const gfx::IntSize& aDisplaySize)
+ : mDurationUs(aDurationUs),
+ mImageSize(aImageSize),
+ mDisplaySize(aDisplaySize) {}
+
+ int64_t mDurationUs = {};
+ gfx::IntSize mImageSize = {};
+ gfx::IntSize mDisplaySize = {};
+ };
+
+ class CallbacksSupport final : public JavaCallbacksSupport {
+ public:
+ explicit CallbacksSupport(RemoteVideoDecoder* aDecoder)
+ : mDecoder(aDecoder) {}
+
+ void HandleInput(int64_t aTimestamp, bool aProcessed) override {
+ mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
+ }
+
+ void HandleOutput(java::Sample::Param aSample,
+ java::SampleBuffer::Param aBuffer) override {
+ MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
+ // aSample will be implicitly converted into a GlobalRef.
+ mDecoder->ProcessOutput(aSample);
+ }
+
+ void HandleOutputFormatChanged(
+ java::sdk::MediaFormat::Param aFormat) override {
+ int32_t colorFormat = 0;
+ aFormat->GetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT,
+ &colorFormat);
+ if (colorFormat == 0) {
+ mDecoder->Error(
+ MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ RESULT_DETAIL("Invalid color format:%d", colorFormat)));
+ return;
+ }
+
+ Maybe<int32_t> colorRange;
+ {
+ int32_t range = 0;
+ if (NS_SUCCEEDED(aFormat->GetInteger(
+ java::sdk::MediaFormat::KEY_COLOR_RANGE, &range))) {
+ colorRange.emplace(range);
+ }
+ }
+
+ Maybe<int32_t> colorSpace;
+ {
+ int32_t space = 0;
+ if (NS_SUCCEEDED(aFormat->GetInteger(
+ java::sdk::MediaFormat::KEY_COLOR_STANDARD, &space))) {
+ colorSpace.emplace(space);
+ }
+ }
+
+ mDecoder->ProcessOutputFormatChange(colorFormat, colorRange, colorSpace);
+ }
+
+ void HandleError(const MediaResult& aError) override {
+ mDecoder->Error(aError);
+ }
+
+ friend class RemoteDataDecoder;
+
+ private:
+ RemoteVideoDecoder* mDecoder;
+ };
+
+ RemoteVideoDecoder(const VideoInfo& aConfig,
+ java::sdk::MediaFormat::Param aFormat,
+ const nsString& aDrmStubId, Maybe<TrackingId> aTrackingId)
+ : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
+ aFormat, aDrmStubId),
+ mConfig(aConfig),
+ mTrackingId(std::move(aTrackingId)) {}
+
+ ~RemoteVideoDecoder() {
+ if (mSurface) {
+ java::SurfaceAllocator::DisposeSurface(mSurface);
+ }
+ }
+
+ RefPtr<InitPromise> Init() override {
+ mThread = GetCurrentSerialEventTarget();
+ java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
+ if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
+ !bufferInfo) {
+ return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
+ }
+ mInputBufferInfo = bufferInfo;
+
+ mSurface =
+ java::GeckoSurface::LocalRef(java::SurfaceAllocator::AcquireSurface(
+ mConfig.mImage.width, mConfig.mImage.height, false));
+ if (!mSurface) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
+
+ mSurfaceHandle = mSurface->GetHandle();
+
+ // Register native methods.
+ JavaCallbacksSupport::Init();
+
+ mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
+ if (!mJavaCallbacks) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
+ JavaCallbacksSupport::AttachNative(
+ mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
+
+ mJavaDecoder = java::CodecProxy::Create(
+        false,  // false creates a decoder; true would create an encoder.
+ mFormat, mSurface, mJavaCallbacks, mDrmStubId);
+ if (mJavaDecoder == nullptr) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
+ mIsCodecSupportAdaptivePlayback =
+ mJavaDecoder->IsAdaptivePlaybackSupported();
+ mIsHardwareAccelerated = mJavaDecoder->IsHardwareAccelerated();
+
+ // On some devices we have observed that the transform obtained from
+ // SurfaceTexture.getTransformMatrix() is incorrect for surfaces produced by
+ // a MediaCodec. We therefore override the transform to be a simple y-flip
+ // to ensure it is rendered correctly.
+ const auto hardware = java::sdk::Build::HARDWARE()->ToString();
+ if (hardware.EqualsASCII("mt6735") || hardware.EqualsASCII("kirin980")) {
+ mTransformOverride = Some(
+ gfx::Matrix4x4::Scaling(1.0, -1.0, 1.0).PostTranslate(0.0, 1.0, 0.0));
+ }
+
+ mMediaInfoFlag = MediaInfoFlag::None;
+ mMediaInfoFlag |= mIsHardwareAccelerated ? MediaInfoFlag::HardwareDecoding
+ : MediaInfoFlag::SoftwareDecoding;
+ if (mMimeType.EqualsLiteral("video/mp4") ||
+ mMimeType.EqualsLiteral("video/avc")) {
+ mMediaInfoFlag |= MediaInfoFlag::VIDEO_H264;
+ } else if (mMimeType.EqualsLiteral("video/vp8")) {
+ mMediaInfoFlag |= MediaInfoFlag::VIDEO_VP8;
+ } else if (mMimeType.EqualsLiteral("video/vp9")) {
+ mMediaInfoFlag |= MediaInfoFlag::VIDEO_VP9;
+ } else if (mMimeType.EqualsLiteral("video/av1")) {
+ mMediaInfoFlag |= MediaInfoFlag::VIDEO_AV1;
+ }
+ return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
+ }
+
+ RefPtr<MediaDataDecoder::FlushPromise> Flush() override {
+ AssertOnThread();
+ mInputInfos.Clear();
+ mSeekTarget.reset();
+ mLatestOutputTime.reset();
+ mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
+ return RemoteDataDecoder::Flush();
+ }
+
+ nsCString GetCodecName() const override {
+ if (mMediaInfoFlag & MediaInfoFlag::VIDEO_H264) {
+ return "h264"_ns;
+ }
+ if (mMediaInfoFlag & MediaInfoFlag::VIDEO_VP8) {
+ return "vp8"_ns;
+ }
+ if (mMediaInfoFlag & MediaInfoFlag::VIDEO_VP9) {
+ return "vp9"_ns;
+ }
+ if (mMediaInfoFlag & MediaInfoFlag::VIDEO_AV1) {
+ return "av1"_ns;
+ }
+ return "unknown"_ns;
+ }
+
+ RefPtr<MediaDataDecoder::DecodePromise> Decode(
+ MediaRawData* aSample) override {
+ AssertOnThread();
+
+ if (NeedsNewDecoder()) {
+ return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER,
+ __func__);
+ }
+
+ const VideoInfo* config =
+ aSample->mTrackInfo ? aSample->mTrackInfo->GetAsVideoInfo() : &mConfig;
+ MOZ_ASSERT(config);
+
+ mTrackingId.apply([&](const auto& aId) {
+ MediaInfoFlag flag = mMediaInfoFlag;
+ flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
+ : MediaInfoFlag::NonKeyFrame);
+ mPerformanceRecorder.Start(aSample->mTime.ToMicroseconds(),
+ "AndroidDecoder"_ns, aId, flag);
+ });
+
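+    // Keyed by the sample's microsecond timestamp; ProcessOutput() looks the
+    // entry back up via the presentation time MediaCodec echoes in BufferInfo.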
+ InputInfo info(aSample->mDuration.ToMicroseconds(), config->mImage,
+ config->mDisplay);
+ mInputInfos.Insert(aSample->mTime.ToMicroseconds(), info);
+ return RemoteDataDecoder::Decode(aSample);
+ }
+
+ bool SupportDecoderRecycling() const override {
+ return mIsCodecSupportAdaptivePlayback;
+ }
+
+ void SetSeekThreshold(const TimeUnit& aTime) override {
+ auto setter = [self = RefPtr{this}, aTime] {
+ if (aTime.IsValid()) {
+ self->mSeekTarget = Some(aTime);
+ } else {
+ self->mSeekTarget.reset();
+ }
+ };
+ if (mThread->IsOnCurrentThread()) {
+ setter();
+ } else {
+ nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
+ "RemoteVideoDecoder::SetSeekThreshold", std::move(setter));
+ nsresult rv = mThread->Dispatch(runnable.forget());
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ }
+ }
+
+ bool IsUsefulData(const RefPtr<MediaData>& aSample) override {
+ AssertOnThread();
+
+ if (mLatestOutputTime && aSample->mTime < mLatestOutputTime.value()) {
+ return false;
+ }
+
+ const TimeUnit endTime = aSample->GetEndTime();
+ if (mSeekTarget && endTime <= mSeekTarget.value()) {
+ return false;
+ }
+
+ mSeekTarget.reset();
+ mLatestOutputTime = Some(endTime);
+ return true;
+ }
+
+ bool IsHardwareAccelerated(nsACString& aFailureReason) const override {
+ return mIsHardwareAccelerated;
+ }
+
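+  // MediaCodec consumes H.264 in Annex B form, so avcC-framed samples are
+  // converted before they reach Decode().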
+ ConversionRequired NeedsConversion() const override {
+ return ConversionRequired::kNeedAnnexB;
+ }
+
+ private:
+ // Param and LocalRef are only valid for the duration of a JNI method call.
+ // Use GlobalRef as the parameter type to keep the Java object referenced
+ // until running.
+ void ProcessOutput(java::Sample::GlobalRef&& aSample) {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv =
+ mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&>(
+ "RemoteVideoDecoder::ProcessOutput", this,
+ &RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ aSample->Dispose();
+ return;
+ }
+
+ UniquePtr<layers::SurfaceTextureImage::SetCurrentCallback> releaseSample(
+ new CompositeListener(mJavaDecoder, aSample));
+
+    // If our output surface has been released (due to the GPU process
+    // crashing), request a new decoder, which will in turn allocate a new
+    // Surface. This is usually handled by the Error() callback, but on some
+    // devices (or at least on the emulator) the Java decoder does not raise
+    // an error when the Surface is released, so we raise the error here as
+    // well.
+ if (NeedsNewDecoder()) {
+ Error(MediaResult(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER,
+ RESULT_DETAIL("VideoCallBack::HandleOutput")));
+ return;
+ }
+
+ java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
+ MOZ_ASSERT(info);
+
+ int32_t flags;
+ bool ok = NS_SUCCEEDED(info->Flags(&flags));
+
+ int32_t offset;
+ ok &= NS_SUCCEEDED(info->Offset(&offset));
+
+ int32_t size;
+ ok &= NS_SUCCEEDED(info->Size(&size));
+
+ int64_t presentationTimeUs;
+ ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
+
+ if (!ok) {
+ Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ RESULT_DETAIL("VideoCallBack::HandleOutput")));
+ return;
+ }
+
+ InputInfo inputInfo;
+ ok = mInputInfos.Find(presentationTimeUs, inputInfo);
+ bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+ if (!ok && !isEOS) {
+ // Ignore output with no corresponding input.
+ return;
+ }
+
+ if (ok && (size > 0 || presentationTimeUs >= 0)) {
+ // On certain devices SMPTE 432 color primaries are rendered incorrectly,
+ // so we force BT709 to be used instead.
+ // Color space 10 comes from the video in bug 1866020 and corresponds to
+ // libstagefright's kColorStandardDCI_P3.
+ // 65800 comes from the video in bug 1879720 and is vendor-specific.
+ static bool isSmpte432Buggy = areSmpte432ColorPrimariesBuggy();
+ bool forceBT709ColorSpace =
+ isSmpte432Buggy &&
+ (mColorSpace == Some(10) || mColorSpace == Some(65800));
+
+ RefPtr<layers::Image> img = new layers::SurfaceTextureImage(
+ mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
+ gl::OriginPos::BottomLeft, mConfig.HasAlpha(), forceBT709ColorSpace,
+ mTransformOverride);
+ img->AsSurfaceTextureImage()->RegisterSetCurrentCallback(
+ std::move(releaseSample));
+
+ RefPtr<VideoData> v = VideoData::CreateFromImage(
+ inputInfo.mDisplaySize, offset,
+ TimeUnit::FromMicroseconds(presentationTimeUs),
+ TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img.forget(),
+ !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME),
+ TimeUnit::FromMicroseconds(presentationTimeUs));
+
+ mPerformanceRecorder.Record(presentationTimeUs, [&](DecodeStage& aStage) {
+ using Cap = java::sdk::MediaCodecInfo::CodecCapabilities;
+ using Fmt = java::sdk::MediaFormat;
+ mColorFormat.apply([&](int32_t aFormat) {
+ switch (aFormat) {
+ case Cap::COLOR_Format32bitABGR8888:
+ case Cap::COLOR_Format32bitARGB8888:
+ case Cap::COLOR_Format32bitBGRA8888:
+ case Cap::COLOR_FormatRGBAFlexible:
+ aStage.SetImageFormat(DecodeStage::RGBA32);
+ break;
+ case Cap::COLOR_Format24bitBGR888:
+ case Cap::COLOR_Format24bitRGB888:
+ case Cap::COLOR_FormatRGBFlexible:
+ aStage.SetImageFormat(DecodeStage::RGB24);
+ break;
+ case Cap::COLOR_FormatYUV411Planar:
+ case Cap::COLOR_FormatYUV411PackedPlanar:
+ case Cap::COLOR_FormatYUV420Planar:
+ case Cap::COLOR_FormatYUV420PackedPlanar:
+ case Cap::COLOR_FormatYUV420Flexible:
+ aStage.SetImageFormat(DecodeStage::YUV420P);
+ break;
+ case Cap::COLOR_FormatYUV420SemiPlanar:
+ case Cap::COLOR_FormatYUV420PackedSemiPlanar:
+ case Cap::COLOR_QCOM_FormatYUV420SemiPlanar:
+ case Cap::COLOR_TI_FormatYUV420PackedSemiPlanar:
+ aStage.SetImageFormat(DecodeStage::NV12);
+ break;
+ case Cap::COLOR_FormatYCbYCr:
+ case Cap::COLOR_FormatYCrYCb:
+ case Cap::COLOR_FormatCbYCrY:
+ case Cap::COLOR_FormatCrYCbY:
+ case Cap::COLOR_FormatYUV422Planar:
+ case Cap::COLOR_FormatYUV422PackedPlanar:
+ case Cap::COLOR_FormatYUV422Flexible:
+ aStage.SetImageFormat(DecodeStage::YUV422P);
+ break;
+ case Cap::COLOR_FormatYUV444Interleaved:
+ case Cap::COLOR_FormatYUV444Flexible:
+ aStage.SetImageFormat(DecodeStage::YUV444P);
+ break;
+ case Cap::COLOR_FormatSurface:
+ aStage.SetImageFormat(DecodeStage::ANDROID_SURFACE);
+ break;
+ /* Added in API level 33
+ case Cap::COLOR_FormatYUVP010:
+ aStage.SetImageFormat(DecodeStage::P010);
+ break;
+ */
+ default:
+ NS_WARNING(nsPrintfCString("Unhandled color format %d (0x%08x)",
+ aFormat, aFormat)
+ .get());
+ }
+ });
+ mColorRange.apply([&](int32_t aRange) {
+ switch (aRange) {
+ case Fmt::COLOR_RANGE_FULL:
+ aStage.SetColorRange(gfx::ColorRange::FULL);
+ break;
+ case Fmt::COLOR_RANGE_LIMITED:
+ aStage.SetColorRange(gfx::ColorRange::LIMITED);
+ break;
+ default:
+ NS_WARNING(nsPrintfCString("Unhandled color range %d (0x%08x)",
+ aRange, aRange)
+ .get());
+ }
+ });
+ mColorSpace.apply([&](int32_t aSpace) {
+ switch (aSpace) {
+ case Fmt::COLOR_STANDARD_BT2020:
+ aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT2020);
+ break;
+ case Fmt::COLOR_STANDARD_BT601_NTSC:
+ case Fmt::COLOR_STANDARD_BT601_PAL:
+ aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT601);
+ break;
+ case Fmt::COLOR_STANDARD_BT709:
+ aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT709);
+ break;
+ default:
+ NS_WARNING(nsPrintfCString("Unhandled color space %d (0x%08x)",
+ aSpace, aSpace)
+ .get());
+ }
+ });
+ aStage.SetResolution(v->mImage->GetSize().Width(),
+ v->mImage->GetSize().Height());
+ });
+
+ RemoteDataDecoder::UpdateOutputStatus(std::move(v));
+ }
+
+ if (isEOS) {
+ DrainComplete();
+ }
+ }
+
+ void ProcessOutputFormatChange(int32_t aColorFormat,
+ Maybe<int32_t> aColorRange,
+ Maybe<int32_t> aColorSpace) {
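+    // Format-change callbacks arrive on a Binder thread, so hop over to
+    // mThread first; all decoder state is then mutated on a single thread.
+    // The same re-dispatch pattern guards the other entry points below.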
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv = mThread->Dispatch(
+ NewRunnableMethod<int32_t, Maybe<int32_t>, Maybe<int32_t>>(
+ "RemoteVideoDecoder::ProcessOutputFormatChange", this,
+ &RemoteVideoDecoder::ProcessOutputFormatChange, aColorFormat,
+ aColorRange, aColorSpace));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+
+ AssertOnThread();
+
+ mColorFormat = Some(aColorFormat);
+ mColorRange = aColorRange;
+ mColorSpace = aColorSpace;
+ }
+
+ bool NeedsNewDecoder() const override {
+ return !mSurface || mSurface->IsReleased();
+ }
+
+ const VideoInfo mConfig;
+ java::GeckoSurface::GlobalRef mSurface;
+ AndroidSurfaceTextureHandle mSurfaceHandle{};
+ // Used to override the SurfaceTexture transform on some devices where the
+ // decoder provides a buggy value.
+ Maybe<gfx::Matrix4x4> mTransformOverride;
+  // Only accessed on the reader's task queue.
+ bool mIsCodecSupportAdaptivePlayback = false;
+  // Can be accessed on any thread, but only written during init.
+ bool mIsHardwareAccelerated = false;
+  // Accessed on mThread and the reader's thread. SimpleMap is thread-safe,
+  // so this is fine.
+ SimpleMap<InputInfo> mInputInfos;
+ // Only accessed on mThread.
+ Maybe<TimeUnit> mSeekTarget;
+ Maybe<TimeUnit> mLatestOutputTime;
+ Maybe<int32_t> mColorFormat;
+ Maybe<int32_t> mColorRange;
+ Maybe<int32_t> mColorSpace;
+ // Only accessed on mThread.
+ // Tracking id for the performance recorder.
+ const Maybe<TrackingId> mTrackingId;
+ // Can be accessed on any thread, but only written during init.
+ // Pre-filled decode info used by the performance recorder.
+ MediaInfoFlag mMediaInfoFlag = {};
+ // Only accessed on mThread.
+ // Records decode performance to the profiler.
+ PerformanceRecorderMulti<DecodeStage> mPerformanceRecorder;
+};
+
+class RemoteAudioDecoder final : public RemoteDataDecoder {
+ public:
+ RemoteAudioDecoder(const AudioInfo& aConfig,
+ java::sdk::MediaFormat::Param aFormat,
+ const nsString& aDrmStubId)
+ : RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
+ aFormat, aDrmStubId),
+ mOutputChannels(AssertedCast<int32_t>(aConfig.mChannels)),
+ mOutputSampleRate(AssertedCast<int32_t>(aConfig.mRate)) {
+ JNIEnv* const env = jni::GetEnvForThread();
+
+ bool formatHasCSD = false;
+ NS_ENSURE_SUCCESS_VOID(aFormat->ContainsKey(u"csd-0"_ns, &formatHasCSD));
+
+ // It would be nice to instead use more specific information here, but
+ // we force a byte buffer for now since this handles arbitrary codecs.
+ // TODO(bug 1768564): implement further type checking for codec data.
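+    // As an illustration (values not taken from this patch): for AAC
+    // ("audio/mp4a-latm") the csd-0 buffer is the AudioSpecificConfig, e.g.
+    // the two bytes 0x12 0x10 for AAC-LC at 44.1 kHz, stereo.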
+ RefPtr<MediaByteBuffer> audioCodecSpecificBinaryBlob =
+ ForceGetAudioCodecSpecificBlob(aConfig.mCodecSpecificConfig);
+ if (!formatHasCSD && audioCodecSpecificBinaryBlob->Length() >= 2) {
+ jni::ByteBuffer::LocalRef buffer(env);
+ buffer = jni::ByteBuffer::New(audioCodecSpecificBinaryBlob->Elements(),
+ audioCodecSpecificBinaryBlob->Length());
+ NS_ENSURE_SUCCESS_VOID(aFormat->SetByteBuffer(u"csd-0"_ns, buffer));
+ }
+ }
+
+ RefPtr<InitPromise> Init() override {
+ mThread = GetCurrentSerialEventTarget();
+ java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
+ if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
+ !bufferInfo) {
+ return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
+ }
+ mInputBufferInfo = bufferInfo;
+
+ // Register native methods.
+ JavaCallbacksSupport::Init();
+
+ mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
+ if (!mJavaCallbacks) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
+ JavaCallbacksSupport::AttachNative(
+ mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
+
+ mJavaDecoder = java::CodecProxy::Create(false, mFormat, nullptr,
+ mJavaCallbacks, mDrmStubId);
+ if (mJavaDecoder == nullptr) {
+ return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
+
+ return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
+ }
+
+ nsCString GetCodecName() const override {
+ if (mMimeType.EqualsLiteral("audio/mp4a-latm")) {
+ return "aac"_ns;
+ }
+ return "unknown"_ns;
+ }
+
+ RefPtr<FlushPromise> Flush() override {
+ AssertOnThread();
+ mFirstDemuxedSampleTime.reset();
+ return RemoteDataDecoder::Flush();
+ }
+
+ RefPtr<DecodePromise> Decode(MediaRawData* aSample) override {
+ AssertOnThread();
+ if (!mFirstDemuxedSampleTime) {
+ MOZ_ASSERT(aSample->mTime.IsValid());
+ mFirstDemuxedSampleTime.emplace(aSample->mTime);
+ }
+ return RemoteDataDecoder::Decode(aSample);
+ }
+
+ private:
+ class CallbacksSupport final : public JavaCallbacksSupport {
+ public:
+ explicit CallbacksSupport(RemoteAudioDecoder* aDecoder)
+ : mDecoder(aDecoder) {}
+
+ void HandleInput(int64_t aTimestamp, bool aProcessed) override {
+ mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
+ }
+
+ void HandleOutput(java::Sample::Param aSample,
+ java::SampleBuffer::Param aBuffer) override {
+ MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
+ // aSample will be implicitly converted into a GlobalRef.
+ mDecoder->ProcessOutput(aSample, aBuffer);
+ }
+
+ void HandleOutputFormatChanged(
+ java::sdk::MediaFormat::Param aFormat) override {
+ int32_t outputChannels = 0;
+ aFormat->GetInteger(u"channel-count"_ns, &outputChannels);
+ AudioConfig::ChannelLayout layout(outputChannels);
+ if (!layout.IsValid()) {
+ mDecoder->Error(MediaResult(
+ NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ RESULT_DETAIL("Invalid channel layout:%d", outputChannels)));
+ return;
+ }
+
+ int32_t sampleRate = 0;
+ aFormat->GetInteger(u"sample-rate"_ns, &sampleRate);
+ LOG("Audio output format changed: channels:%d sample rate:%d",
+ outputChannels, sampleRate);
+
+ mDecoder->ProcessOutputFormatChange(outputChannels, sampleRate);
+ }
+
+ void HandleError(const MediaResult& aError) override {
+ mDecoder->Error(aError);
+ }
+
+ private:
+ RemoteAudioDecoder* mDecoder;
+ };
+
+ bool IsSampleTimeSmallerThanFirstDemuxedSampleTime(int64_t aTime) const {
+ return mFirstDemuxedSampleTime->ToMicroseconds() > aTime;
+ }
+
+ bool ShouldDiscardSample(int64_t aSession) const {
+ AssertOnThread();
+    // HandleOutput() runs on the Android Binder thread pool and could be
+    // preempted by the RemoteDataDecoder task queue, so ProcessOutput() may
+    // be scheduled after Shutdown() or Flush(). Samples returned after
+    // Shutdown() or Flush() are no longer needed. mFirstDemuxedSampleTime
+    // tells us whether Flush() has been called, because Flush() resets it.
+ return GetState() == State::SHUTDOWN || !mFirstDemuxedSampleTime ||
+ mSession != aSession;
+ }
+
+ // Param and LocalRef are only valid for the duration of a JNI method call.
+ // Use GlobalRef as the parameter type to keep the Java object referenced
+ // until running.
+ void ProcessOutput(java::Sample::GlobalRef&& aSample,
+ java::SampleBuffer::GlobalRef&& aBuffer) {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv =
+ mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
+ java::SampleBuffer::GlobalRef&&>(
+ "RemoteAudioDecoder::ProcessOutput", this,
+ &RemoteAudioDecoder::ProcessOutput, std::move(aSample),
+ std::move(aBuffer)));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+
+ AssertOnThread();
+
+ LOG("ProcessOutput");
+
+ if (ShouldDiscardSample(aSample->Session()) || !aBuffer->IsValid()) {
+ aSample->Dispose();
+ LOG("Discarding sample");
+ return;
+ }
+
+ RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);
+
+ java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
+ MOZ_ASSERT(info);
+
+ int32_t flags = 0;
+ bool ok = NS_SUCCEEDED(info->Flags(&flags));
+ bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+
+ int32_t offset;
+ ok &= NS_SUCCEEDED(info->Offset(&offset));
+
+ int64_t presentationTimeUs;
+ ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
+
+ int32_t size;
+ ok &= NS_SUCCEEDED(info->Size(&size));
+
+ if (!ok ||
+ (IsSampleTimeSmallerThanFirstDemuxedSampleTime(presentationTimeUs) &&
+ !isEOS)) {
+ LOG("ProcessOutput: decoding error ok[%s], pts[%" PRId64 "], eos[%s]",
+ ok ? "true" : "false", presentationTimeUs, isEOS ? "true" : "false");
+ Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
+ return;
+ }
+
+ if (size > 0) {
+ const int32_t sampleSize = sizeof(int16_t);
+ const int32_t numSamples = size / sampleSize;
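+      // Worked example (hypothetical numbers): a 4096-byte buffer of
+      // interleaved stereo int16 PCM gives numSamples = 4096 / 2 = 2048,
+      // i.e. 2048 / 2 = 1024 frames per channel, which is the frame count
+      // logged below.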
+
+ InflatableShortBuffer audio(numSamples);
+ if (!audio) {
+ Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
+ LOG("OOM while allocating temporary output buffer");
+ return;
+ }
+ jni::ByteBuffer::LocalRef dest = jni::ByteBuffer::New(audio.get(), size);
+ aBuffer->WriteToByteBuffer(dest, offset, size);
+ AlignedFloatBuffer converted = audio.Inflate();
+
+ TimeUnit pts = TimeUnit::FromMicroseconds(presentationTimeUs);
+
+ LOG("Decoded: %u frames of %s audio, pts: %s, %d channels, %" PRId32
+ " Hz",
+ numSamples / mOutputChannels,
+ sampleSize == sizeof(int16_t) ? "int16" : "f32", pts.ToString().get(),
+ mOutputChannels, mOutputSampleRate);
+
+ RefPtr<AudioData> data = new AudioData(
+ 0, pts, std::move(converted), mOutputChannels, mOutputSampleRate);
+
+ UpdateOutputStatus(std::move(data));
+ } else {
+ LOG("ProcessOutput but size 0");
+ }
+
+ if (isEOS) {
+ LOG("EOS: drain complete");
+ DrainComplete();
+ }
+ }
+
+ void ProcessOutputFormatChange(int32_t aChannels, int32_t aSampleRate) {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv = mThread->Dispatch(NewRunnableMethod<int32_t, int32_t>(
+ "RemoteAudioDecoder::ProcessOutputFormatChange", this,
+ &RemoteAudioDecoder::ProcessOutputFormatChange, aChannels,
+ aSampleRate));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+
+ AssertOnThread();
+
+ mOutputChannels = aChannels;
+ mOutputSampleRate = aSampleRate;
+ }
+
+ int32_t mOutputChannels{};
+ int32_t mOutputSampleRate{};
+ Maybe<TimeUnit> mFirstDemuxedSampleTime;
+};
+
+already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
+ const CreateDecoderParams& aParams, const nsString& aDrmStubId,
+ CDMProxy* aProxy) {
+ const AudioInfo& config = aParams.AudioConfig();
+ java::sdk::MediaFormat::LocalRef format;
+ NS_ENSURE_SUCCESS(
+ java::sdk::MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
+ config.mChannels, &format),
+ nullptr);
+ // format->SetInteger(java::sdk::MediaFormat::KEY_PCM_ENCODING,
+ // java::sdk::AudioFormat::ENCODING_PCM_FLOAT);
+
+ RefPtr<MediaDataDecoder> decoder =
+ new RemoteAudioDecoder(config, format, aDrmStubId);
+ if (aProxy) {
+ decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
+ }
+ return decoder.forget();
+}
+
+already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
+ const CreateDecoderParams& aParams, const nsString& aDrmStubId,
+ CDMProxy* aProxy) {
+ const VideoInfo& config = aParams.VideoConfig();
+ java::sdk::MediaFormat::LocalRef format;
+ NS_ENSURE_SUCCESS(java::sdk::MediaFormat::CreateVideoFormat(
+ TranslateMimeType(config.mMimeType),
+ config.mImage.width, config.mImage.height, &format),
+ nullptr);
+
+ RefPtr<MediaDataDecoder> decoder =
+ new RemoteVideoDecoder(config, format, aDrmStubId, aParams.mTrackingId);
+ if (aProxy) {
+ decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
+ }
+ return decoder.forget();
+}
+
+RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
+ const nsACString& aMimeType,
+ java::sdk::MediaFormat::Param aFormat,
+ const nsString& aDrmStubId)
+ : mType(aType),
+ mMimeType(aMimeType),
+ mFormat(aFormat),
+ mDrmStubId(aDrmStubId),
+ mSession(0),
+ mNumPendingInputs(0) {}
+
+RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::Flush() {
+ AssertOnThread();
+ MOZ_ASSERT(GetState() != State::SHUTDOWN);
+
+ mDecodedData = DecodedData();
+ UpdatePendingInputStatus(PendingOp::CLEAR);
+ mDecodePromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
+ mDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
+ SetState(State::DRAINED);
+ mJavaDecoder->Flush();
+ return FlushPromise::CreateAndResolve(true, __func__);
+}
+
+RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_CANCELED,
+ __func__);
+ }
+ RefPtr<DecodePromise> p = mDrainPromise.Ensure(__func__);
+ if (GetState() == State::DRAINED) {
+ // There's no operation to perform other than returning any already
+ // decoded data.
+ ReturnDecodedData();
+ return p;
+ }
+
+ if (GetState() == State::DRAINING) {
+ // Draining operation already pending, let it complete its course.
+ return p;
+ }
+
+ SetState(State::DRAINING);
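+  // MediaCodec's drain convention: queue an empty input buffer flagged
+  // BUFFER_FLAG_END_OF_STREAM (the -1 is the unused presentation time). The
+  // codec then emits its remaining outputs followed by an EOS-flagged output,
+  // which HandleOutput() turns into a DrainComplete() call.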
+ mInputBufferInfo->Set(0, 0, -1,
+ java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
+ mSession = mJavaDecoder->Input(nullptr, mInputBufferInfo, nullptr);
+ return p;
+}
+
+RefPtr<ShutdownPromise> RemoteDataDecoder::Shutdown() {
+ LOG("Shutdown");
+ AssertOnThread();
+ SetState(State::SHUTDOWN);
+ if (mJavaDecoder) {
+ mJavaDecoder->Release();
+ mJavaDecoder = nullptr;
+ }
+
+ if (mJavaCallbacks) {
+ JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
+ JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
+ mJavaCallbacks = nullptr;
+ }
+
+ mFormat = nullptr;
+
+ return ShutdownPromise::CreateAndResolve(true, __func__);
+}
+
+using CryptoInfoResult =
+ Result<java::sdk::MediaCodec::CryptoInfo::LocalRef, nsresult>;
+
+static CryptoInfoResult GetCryptoInfoFromSample(const MediaRawData* aSample) {
+ const auto& cryptoObj = aSample->mCrypto;
+ java::sdk::MediaCodec::CryptoInfo::LocalRef cryptoInfo;
+
+ if (!cryptoObj.IsEncrypted()) {
+ return CryptoInfoResult(cryptoInfo);
+ }
+
+ static bool supportsCBCS = java::CodecProxy::SupportsCBCS();
+ if (cryptoObj.mCryptoScheme == CryptoScheme::Cbcs && !supportsCBCS) {
+ return CryptoInfoResult(NS_ERROR_DOM_MEDIA_NOT_SUPPORTED_ERR);
+ }
+
+ nsresult rv = java::sdk::MediaCodec::CryptoInfo::New(&cryptoInfo);
+ NS_ENSURE_SUCCESS(rv, CryptoInfoResult(rv));
+
+ uint32_t numSubSamples = std::min<uint32_t>(
+ cryptoObj.mPlainSizes.Length(), cryptoObj.mEncryptedSizes.Length());
+
+ uint32_t totalSubSamplesSize = 0;
+ for (const auto& size : cryptoObj.mPlainSizes) {
+ totalSubSamplesSize += size;
+ }
+ for (const auto& size : cryptoObj.mEncryptedSizes) {
+ totalSubSamplesSize += size;
+ }
+
+ // Deep copy the plain sizes so we can modify them.
+ nsTArray<uint32_t> plainSizes = cryptoObj.mPlainSizes.Clone();
+ uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
+ // Size of codec specific data("CSD") for Android java::sdk::MediaCodec usage
+ // should be included in the 1st plain size if it exists.
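+  // Illustration (hypothetical numbers): a 1050-byte sample with a 50-byte
+  // CSD prefix, plainSizes = [10] and encryptedSizes = [990] gives
+  // totalSubSamplesSize = 1000 and codecSpecificDataSize = 50, so the leading
+  // plain size becomes 60 and the subsample map covers the whole sample.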
+ if (codecSpecificDataSize > 0 && !plainSizes.IsEmpty()) {
+    // This shouldn't overflow, as the plain size should be at most UINT16_MAX
+    // and the CSD should never be that large. The checked int acts like a
+    // diagnostic assert here, to catch insane inputs if we ever get them.
+ CheckedUint32 newLeadingPlainSize{plainSizes[0]};
+ newLeadingPlainSize += codecSpecificDataSize;
+ plainSizes[0] = newLeadingPlainSize.value();
+ }
+
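+  // MediaCodec expects a 16-byte IV regardless of scheme; shorter IVs (cenc
+  // commonly uses 8-byte IVs) are zero-padded to that length below.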
+ static const int kExpectedIVLength = 16;
+ nsTArray<uint8_t> tempIV(kExpectedIVLength);
+ jint mode;
+ switch (cryptoObj.mCryptoScheme) {
+ case CryptoScheme::None:
+ mode = java::sdk::MediaCodec::CRYPTO_MODE_UNENCRYPTED;
+ MOZ_ASSERT(cryptoObj.mIV.Length() <= kExpectedIVLength);
+ tempIV.AppendElements(cryptoObj.mIV);
+ break;
+ case CryptoScheme::Cenc:
+ mode = java::sdk::MediaCodec::CRYPTO_MODE_AES_CTR;
+ MOZ_ASSERT(cryptoObj.mIV.Length() <= kExpectedIVLength);
+ tempIV.AppendElements(cryptoObj.mIV);
+ break;
+ case CryptoScheme::Cbcs:
+ mode = java::sdk::MediaCodec::CRYPTO_MODE_AES_CBC;
+ MOZ_ASSERT(cryptoObj.mConstantIV.Length() <= kExpectedIVLength);
+ tempIV.AppendElements(cryptoObj.mConstantIV);
+ break;
+ }
+ auto tempIVLength = tempIV.Length();
+ for (size_t i = tempIVLength; i < kExpectedIVLength; i++) {
+ // Padding with 0
+ tempIV.AppendElement(0);
+ }
+
+ MOZ_ASSERT(numSubSamples <= INT32_MAX);
+ cryptoInfo->Set(static_cast<int32_t>(numSubSamples),
+ mozilla::jni::IntArray::From(plainSizes),
+ mozilla::jni::IntArray::From(cryptoObj.mEncryptedSizes),
+ mozilla::jni::ByteArray::From(cryptoObj.mKeyId),
+ mozilla::jni::ByteArray::From(tempIV), mode);
+ if (mode == java::sdk::MediaCodec::CRYPTO_MODE_AES_CBC) {
+ java::CodecProxy::SetCryptoPatternIfNeeded(
+ cryptoInfo, cryptoObj.mCryptByteBlock, cryptoObj.mSkipByteBlock);
+ }
+
+ return CryptoInfoResult(cryptoInfo);
+}
+
+RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Decode(
+ MediaRawData* aSample) {
+ AssertOnThread();
+ MOZ_ASSERT(GetState() != State::SHUTDOWN);
+ MOZ_ASSERT(aSample != nullptr);
+ jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(
+ const_cast<uint8_t*>(aSample->Data()), aSample->Size(), fallible);
+ if (!bytes) {
+ return DecodePromise::CreateAndReject(
+ MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
+ }
+
+ SetState(State::DRAINABLE);
+ MOZ_ASSERT(aSample->Size() <= INT32_MAX);
+ mInputBufferInfo->Set(0, static_cast<int32_t>(aSample->Size()),
+ aSample->mTime.ToMicroseconds(), 0);
+ CryptoInfoResult crypto = GetCryptoInfoFromSample(aSample);
+ if (crypto.isErr()) {
+ return DecodePromise::CreateAndReject(
+ MediaResult(crypto.unwrapErr(), __func__), __func__);
+ }
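+  // Input() returns a session id tagging this generation of inputs; outputs
+  // carrying a stale session id (e.g. produced before a Flush()) are
+  // discarded when outputs are processed (see ShouldDiscardSample()).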
+ int64_t session =
+ mJavaDecoder->Input(bytes, mInputBufferInfo, crypto.unwrap());
+ if (session == java::CodecProxy::INVALID_SESSION) {
+ return DecodePromise::CreateAndReject(
+ MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
+ }
+ mSession = session;
+ return mDecodePromise.Ensure(__func__);
+}
+
+void RemoteDataDecoder::UpdatePendingInputStatus(PendingOp aOp) {
+ AssertOnThread();
+ switch (aOp) {
+ case PendingOp::INCREASE:
+ mNumPendingInputs++;
+ break;
+ case PendingOp::DECREASE:
+ mNumPendingInputs--;
+ break;
+ case PendingOp::CLEAR:
+ mNumPendingInputs = 0;
+ break;
+ }
+}
+
+void RemoteDataDecoder::UpdateInputStatus(int64_t aTimestamp, bool aProcessed) {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv = mThread->Dispatch(NewRunnableMethod<int64_t, bool>(
+ "RemoteDataDecoder::UpdateInputStatus", this,
+ &RemoteDataDecoder::UpdateInputStatus, aTimestamp, aProcessed));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ return;
+ }
+
+ if (!aProcessed) {
+ UpdatePendingInputStatus(PendingOp::INCREASE);
+ } else if (HasPendingInputs()) {
+ UpdatePendingInputStatus(PendingOp::DECREASE);
+ }
+
+ if (!HasPendingInputs() || // Input has been processed, request the next one.
+ !mDecodedData.IsEmpty()) { // Previous output arrived before Decode().
+ ReturnDecodedData();
+ }
+}
+
+void RemoteDataDecoder::UpdateOutputStatus(RefPtr<MediaData>&& aSample) {
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ LOG("Update output status, but decoder has been shut down, dropping the "
+ "decoded results");
+ return;
+ }
+ if (IsUsefulData(aSample)) {
+ mDecodedData.AppendElement(std::move(aSample));
+ } else {
+ LOG("Decoded data, but not considered useful");
+ }
+ ReturnDecodedData();
+}
+
+void RemoteDataDecoder::ReturnDecodedData() {
+ AssertOnThread();
+ MOZ_ASSERT(GetState() != State::SHUTDOWN);
+
+ // We only want to clear mDecodedData when we have resolved the promises.
+ if (!mDecodePromise.IsEmpty()) {
+ mDecodePromise.Resolve(std::move(mDecodedData), __func__);
+ mDecodedData = DecodedData();
+ } else if (!mDrainPromise.IsEmpty() &&
+ (!mDecodedData.IsEmpty() || GetState() == State::DRAINED)) {
+ mDrainPromise.Resolve(std::move(mDecodedData), __func__);
+ mDecodedData = DecodedData();
+ }
+}
+
+void RemoteDataDecoder::DrainComplete() {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv = mThread->Dispatch(
+ NewRunnableMethod("RemoteDataDecoder::DrainComplete", this,
+ &RemoteDataDecoder::DrainComplete));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ return;
+ }
+ SetState(State::DRAINED);
+ ReturnDecodedData();
+}
+
+void RemoteDataDecoder::Error(const MediaResult& aError) {
+ if (!mThread->IsOnCurrentThread()) {
+ nsresult rv = mThread->Dispatch(NewRunnableMethod<MediaResult>(
+ "RemoteDataDecoder::Error", this, &RemoteDataDecoder::Error, aError));
+ MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+ Unused << rv;
+ return;
+ }
+ AssertOnThread();
+ if (GetState() == State::SHUTDOWN) {
+ return;
+ }
+
+ // If we know we need a new decoder (eg because RemoteVideoDecoder's mSurface
+ // has been released due to a GPU process crash) then override the error to
+ // request a new decoder.
+ const MediaResult& error =
+ NeedsNewDecoder()
+ ? MediaResult(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER, __func__)
+ : aError;
+
+ mDecodePromise.RejectIfExists(error, __func__);
+ mDrainPromise.RejectIfExists(error, __func__);
+}
+
+} // namespace mozilla
+#undef LOG
diff --git a/dom/media/platforms/android/RemoteDataDecoder.h b/dom/media/platforms/android/RemoteDataDecoder.h
new file mode 100644
index 0000000000..5dfda4a7f5
--- /dev/null
+++ b/dom/media/platforms/android/RemoteDataDecoder.h
@@ -0,0 +1,112 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef RemoteDataDecoder_h_
+#define RemoteDataDecoder_h_
+
+#include "AndroidDecoderModule.h"
+#include "SurfaceTexture.h"
+#include "TimeUnits.h"
+#include "mozilla/Monitor.h"
+#include "mozilla/java/CodecProxyWrappers.h"
+
+namespace mozilla {
+
+DDLoggedTypeDeclNameAndBase(RemoteDataDecoder, MediaDataDecoder);
+
+class RemoteDataDecoder : public MediaDataDecoder,
+ public DecoderDoctorLifeLogger<RemoteDataDecoder> {
+ public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(RemoteDataDecoder, final);
+
+ static already_AddRefed<MediaDataDecoder> CreateAudioDecoder(
+ const CreateDecoderParams& aParams, const nsString& aDrmStubId,
+ CDMProxy* aProxy);
+
+ static already_AddRefed<MediaDataDecoder> CreateVideoDecoder(
+ const CreateDecoderParams& aParams, const nsString& aDrmStubId,
+ CDMProxy* aProxy);
+
+ RefPtr<DecodePromise> Decode(MediaRawData* aSample) override;
+ RefPtr<DecodePromise> Drain() override;
+ RefPtr<FlushPromise> Flush() override;
+ RefPtr<ShutdownPromise> Shutdown() override;
+ nsCString GetDescriptionName() const override {
+ return "android decoder (remote)"_ns;
+ }
+
+ protected:
+ virtual ~RemoteDataDecoder() = default;
+ RemoteDataDecoder(MediaData::Type aType, const nsACString& aMimeType,
+ java::sdk::MediaFormat::Param aFormat,
+ const nsString& aDrmStubId);
+
+ // Methods only called on mThread.
+ void UpdateInputStatus(int64_t aTimestamp, bool aProcessed);
+ void UpdateOutputStatus(RefPtr<MediaData>&& aSample);
+ void ReturnDecodedData();
+ void DrainComplete();
+ void Error(const MediaResult& aError);
+ void AssertOnThread() const {
+ // mThread may not be set if Init hasn't been called first.
+ MOZ_ASSERT(!mThread || mThread->IsOnCurrentThread());
+ }
+
+ enum class State { DRAINED, DRAINABLE, DRAINING, SHUTDOWN };
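+  // Transitions, as driven by the methods above: Decode() moves the decoder
+  // to DRAINABLE, Drain() to DRAINING, DrainComplete() and Flush() back to
+  // DRAINED, and Shutdown() to the terminal SHUTDOWN state.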
+ void SetState(State aState) {
+ AssertOnThread();
+ mState = aState;
+ }
+ State GetState() const {
+ AssertOnThread();
+ return mState;
+ }
+
+ // Whether the sample will be used.
+ virtual bool IsUsefulData(const RefPtr<MediaData>& aSample) { return true; }
+
+ MediaData::Type mType;
+
+ nsAutoCString mMimeType;
+ java::sdk::MediaFormat::GlobalRef mFormat;
+
+ java::CodecProxy::GlobalRef mJavaDecoder;
+ java::CodecProxy::NativeCallbacks::GlobalRef mJavaCallbacks;
+ nsString mDrmStubId;
+
+ nsCOMPtr<nsISerialEventTarget> mThread;
+
+  // Preallocated Java object used as reusable storage for input buffer
+  // information. Contents must be changed only on mThread.
+ java::sdk::MediaCodec::BufferInfo::GlobalRef mInputBufferInfo;
+
+ // Session ID attached to samples. It is returned by CodecProxy::Input().
+ // Accessed on mThread only.
+ int64_t mSession;
+
+ private:
+ enum class PendingOp { INCREASE, DECREASE, CLEAR };
+ void UpdatePendingInputStatus(PendingOp aOp);
+  bool HasPendingInputs() const {
+ AssertOnThread();
+ return mNumPendingInputs > 0;
+ }
+
+ // Returns true if we are in a state which requires a new decoder to be
+ // created. In this case all errors will be reported as
+ // NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER to avoid reporting errors as fatal when
+ // they can be fixed with a new decoder.
+ virtual bool NeedsNewDecoder() const { return false; }
+
+ // The following members must only be accessed on mThread.
+ MozPromiseHolder<DecodePromise> mDecodePromise;
+ MozPromiseHolder<DecodePromise> mDrainPromise;
+ DecodedData mDecodedData;
+ State mState = State::DRAINED;
+ size_t mNumPendingInputs;
+};
+
+} // namespace mozilla
+
+#endif  // RemoteDataDecoder_h_