/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

// NOTE(review): the extracted copy of this file had every angle-bracket pair
// (<...>) stripped — all template parameter lists and template arguments were
// missing. They have been restored below from the visible call sites and the
// surrounding structure; verify against the canonical file in version control.

#include "AndroidDataEncoder.h"

#include "AnnexB.h"
#include "H264.h"
#include "MediaData.h"
#include "MediaInfo.h"
#include "SimpleMap.h"
#include "ImageContainer.h"
#include "mozilla/Logging.h"
#include "mozilla/ResultVariant.h"
#include "nsMimeTypes.h"

#include "libyuv.h"

namespace mozilla {
using media::TimeUnit;

extern LazyLogModule sPEMLog;
#define AND_ENC_LOG(arg, ...)                \
  MOZ_LOG(sPEMLog, mozilla::LogLevel::Debug, \
          ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
#define AND_ENC_LOGE(arg, ...)               \
  MOZ_LOG(sPEMLog, mozilla::LogLevel::Error, \
          ("AndroidDataEncoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))

// Reject the pending encode/drain promise with any asynchronous error the
// Java callbacks reported since the last task, consuming the error.
#define REJECT_IF_ERROR()                                                \
  do {                                                                   \
    if (mError) {                                                        \
      auto error = mError.value();                                       \
      mError.reset();                                                    \
      return EncodePromise::CreateAndReject(std::move(error), __func__); \
    }                                                                    \
  } while (0)

template <typename ConfigType>
RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder<ConfigType>::Init() {
  // Sanity-check the input size, for the Android software encoder fails to
  // do it.
  if (mConfig.mSize.width == 0 || mConfig.mSize.height == 0) {
    return InitPromise::CreateAndReject(NS_ERROR_ILLEGAL_VALUE, __func__);
  }

  return InvokeAsync(mTaskQueue, this, __func__,
                     &AndroidDataEncoder::ProcessInit);
}

// Map our codec enum to the Android MediaCodec MIME string.
static const char* MimeTypeOf(MediaDataEncoder::CodecType aCodec) {
  switch (aCodec) {
    case MediaDataEncoder::CodecType::H264:
      return "video/avc";
    case MediaDataEncoder::CodecType::VP8:
      return "video/x-vnd.on2.vp8";
    case MediaDataEncoder::CodecType::VP9:
      return "video/x-vnd.on2.vp9";
    default:
      return "";
  }
}

using FormatResult = Result<java::sdk::MediaFormat::LocalRef, MediaResult>;

// Build a Java MediaFormat describing the requested encode configuration
// (codec, size, bitrate mode/value, color format, frame rate, key-frame
// interval). Returns a MediaResult on any SDK failure.
template <typename ConfigType>
FormatResult ToMediaFormat(const ConfigType& aConfig) {
  nsresult rv = NS_OK;
  java::sdk::MediaFormat::LocalRef format;
  rv = java::sdk::MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodecType),
                                                 aConfig.mSize.width,
                                                 aConfig.mSize.height, &format);
  NS_ENSURE_SUCCESS(
      rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                   "fail to create Java MediaFormat object")));

  rv =
      format->SetInteger(java::sdk::MediaFormat::KEY_BITRATE_MODE, 2 /* CBR */);
  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                                 "fail to set bitrate mode")));

  rv = format->SetInteger(java::sdk::MediaFormat::KEY_BIT_RATE,
                          aConfig.mBitsPerSec);
  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                                 "fail to set bitrate")));

  // COLOR_FormatYUV420SemiPlanar(NV12) is the most widely supported
  // format.
  rv = format->SetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT, 0x15);
  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                                 "fail to set color format")));

  rv = format->SetInteger(java::sdk::MediaFormat::KEY_FRAME_RATE,
                          aConfig.mFramerate);
  NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                                 "fail to set frame rate")));

  // Ensure interval >= 1. A negative value means no key frames are
  // requested after the first frame. A zero value means a stream
  // containing all key frames is requested.
  // NOTE(review): divides by aConfig.mFramerate with no zero check — Init()
  // only validates the frame size, not the frame rate. Confirm callers
  // guarantee a non-zero framerate.
  int32_t intervalInSec =
      std::max<size_t>(1, aConfig.mKeyframeInterval / aConfig.mFramerate);
  rv = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
                          intervalInSec);
  NS_ENSURE_SUCCESS(rv,
                    FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                             "fail to set I-frame interval")));

  return format;
}

// Task-queue half of Init(): allocate the shared input BufferInfo, build the
// MediaFormat, register the native callbacks and create the Java codec proxy.
template <typename ConfigType>
RefPtr<MediaDataEncoder::InitPromise>
AndroidDataEncoder<ConfigType>::ProcessInit() {
  AssertOnTaskQueue();
  MOZ_ASSERT(!mJavaEncoder);

  java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
  if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
      !bufferInfo) {
    return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
  }
  mInputBufferInfo = bufferInfo;

  FormatResult result = ToMediaFormat<ConfigType>(mConfig);
  if (result.isErr()) {
    return InitPromise::CreateAndReject(result.unwrapErr(), __func__);
  }
  mFormat = result.unwrap();

  // Register native methods.
  JavaCallbacksSupport::Init();
  mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
  if (!mJavaCallbacks) {
    return InitPromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "cannot create Java callback object"),
        __func__);
  }
  JavaCallbacksSupport::AttachNative(
      mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

  mJavaEncoder = java::CodecProxy::Create(true /* encoder */, mFormat, nullptr,
                                          mJavaCallbacks, u""_ns);
  if (!mJavaEncoder) {
    return InitPromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "cannot create Java encoder object"),
        __func__);
  }

  mIsHardwareAccelerated = mJavaEncoder->IsHardwareAccelerated();
  mDrainState = DrainState::DRAINABLE;

  return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder<ConfigType>::Encode(
    const MediaData* aSample) {
  RefPtr<AndroidDataEncoder> self = this;
  MOZ_ASSERT(aSample != nullptr);

  RefPtr<const MediaData> sample(aSample);
  return InvokeAsync(mTaskQueue, __func__, [self, sample]() {
    return self->ProcessEncode(std::move(sample));
  });
}

// Convert the sample's I420 image into an NV12 buffer suitable for the codec,
// (re)using aYUVBuffer as scratch storage. aStride/aYPlaneHeight, when
// non-zero, override the image's own layout with the codec-reported one.
// Returns nullptr if libyuv fails the conversion.
static jni::ByteBuffer::LocalRef ConvertI420ToNV12Buffer(
    RefPtr<const VideoData> aSample, RefPtr<MediaByteBuffer>& aYUVBuffer,
    int aStride, int aYPlaneHeight) {
  const layers::PlanarYCbCrImage* image = aSample->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(image);
  const layers::PlanarYCbCrData* yuv = image->GetData();
  auto ySize = yuv->YDataSize();
  auto cbcrSize = yuv->CbCrDataSize();
  // If we have a stride or height passed in from the Codec we need to use
  // those.
  auto yStride = aStride != 0 ? aStride : yuv->mYStride;
  auto height = aYPlaneHeight != 0 ? aYPlaneHeight : ySize.height;
  size_t yLength = yStride * height;
  // Interleaved CbCr plane: last row needs only width*2 bytes, hence the
  // (height - 1) full strides plus one partial row.
  size_t length =
      yLength + yStride * (cbcrSize.height - 1) + cbcrSize.width * 2;

  if (!aYUVBuffer || aYUVBuffer->Capacity() < length) {
    aYUVBuffer = MakeRefPtr<MediaByteBuffer>(length);
    aYUVBuffer->SetLength(length);
  } else {
    MOZ_ASSERT(aYUVBuffer->Length() >= length);
  }

  if (libyuv::I420ToNV12(yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
                         yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
                         aYUVBuffer->Elements(), yStride,
                         aYUVBuffer->Elements() + yLength, yStride,
                         ySize.width, ySize.height) != 0) {
    return nullptr;
  }

  return jni::ByteBuffer::New(aYUVBuffer->Elements(), aYUVBuffer->Length());
}

// Task-queue half of Encode(): feed one frame into the Java codec and resolve
// immediately with whatever encoded output has accumulated so far (output is
// produced asynchronously via ProcessOutput).
template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::ProcessEncode(RefPtr<const MediaData> aSample) {
  AssertOnTaskQueue();

  REJECT_IF_ERROR();

  RefPtr<const VideoData> sample(aSample->As<const VideoData>());
  MOZ_ASSERT(sample);

  // Bug 1789846: Check with the Encoder if MediaCodec has a stride or height
  // value to use.
  jni::ByteBuffer::LocalRef buffer = ConvertI420ToNV12Buffer(
      sample, mYUVBuffer, mJavaEncoder->GetInputFormatStride(),
      mJavaEncoder->GetInputFormatYPlaneHeight());
  if (!buffer) {
    return EncodePromise::CreateAndReject(NS_ERROR_ILLEGAL_INPUT, __func__);
  }

  if (aSample->mKeyframe) {
    mInputBufferInfo->Set(0, mYUVBuffer->Length(),
                          aSample->mTime.ToMicroseconds(),
                          java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME);
  } else {
    mInputBufferInfo->Set(0, mYUVBuffer->Length(),
                          aSample->mTime.ToMicroseconds(), 0);
  }

  mJavaEncoder->Input(buffer, mInputBufferInfo, nullptr);

  if (mEncodedData.Length() > 0) {
    EncodedData pending = std::move(mEncodedData);
    return EncodePromise::CreateAndResolve(std::move(pending), __func__);
  } else {
    return EncodePromise::CreateAndResolve(EncodedData(), __func__);
  }
}

// RAII guard that hands an output sample back to the codec when it goes out
// of scope.
class AutoRelease final {
 public:
  AutoRelease(java::CodecProxy::Param aEncoder, java::Sample::Param aSample)
      : mEncoder(aEncoder), mSample(aSample) {}

  ~AutoRelease() { mEncoder->ReleaseOutput(mSample, false); }

 private:
  java::CodecProxy::GlobalRef mEncoder;
  java::Sample::GlobalRef mSample;
};

// Copy the codec-config NAL data (SPS/PPS) out of the Java buffer. Returns it
// as raw Annex B when requested, otherwise repackaged as avcC extradata.
static RefPtr<MediaByteBuffer> ExtractCodecConfig(
    java::SampleBuffer::Param aBuffer, const int32_t aOffset,
    const int32_t aSize, const bool aAsAnnexB) {
  auto annexB = MakeRefPtr<MediaByteBuffer>(aSize);
  annexB->SetLength(aSize);

  jni::ByteBuffer::LocalRef dest =
      jni::ByteBuffer::New(annexB->Elements(), aSize);
  aBuffer->WriteToByteBuffer(dest, aOffset, aSize);
  if (aAsAnnexB) {
    return annexB;
  }
  // Convert to avcC.
  // NOTE(review): assumes the codec-config buffer always carries at least two
  // NAL entries (SPS then PPS); ElementAt() would assert otherwise.
  nsTArray<AnnexB::NALEntry> paramSets;
  AnnexB::ParseNALEntries(
      Span<const uint8_t>(annexB->Elements(), annexB->Length()), paramSets);

  auto avcc = MakeRefPtr<MediaByteBuffer>();
  AnnexB::NALEntry& sps = paramSets.ElementAt(0);
  AnnexB::NALEntry& pps = paramSets.ElementAt(1);
  const uint8_t* spsPtr = annexB->Elements() + sps.mOffset;
  // spsPtr[1..3] are the profile / constraint / level bytes following the
  // SPS NAL header.
  H264::WriteExtraData(
      avcc, spsPtr[1], spsPtr[2], spsPtr[3],
      Span<const uint8_t>(spsPtr, sps.mSize),
      Span<const uint8_t>(annexB->Elements() + pps.mOffset, pps.mSize));
  return avcc;
}

// Callback path for codec output. Re-dispatches itself to the task queue if
// needed, stashes codec-config data, and appends encoded frames to
// mEncodedData (resolving a pending drain promise, if any).
template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::ProcessOutput(
    java::Sample::GlobalRef&& aSample,
    java::SampleBuffer::GlobalRef&& aBuffer) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(
        NewRunnableMethod<java::Sample::GlobalRef&&,
                          java::SampleBuffer::GlobalRef&&>(
            "AndroidDataEncoder::ProcessOutput", this,
            &AndroidDataEncoder::ProcessOutput, std::move(aSample),
            std::move(aBuffer)));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();

  if (!mJavaEncoder) {
    return;
  }

  // Return the sample to the codec no matter how we exit below.
  AutoRelease releaseSample(mJavaEncoder, aSample);

  java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
  MOZ_ASSERT(info);

  int32_t flags;
  bool ok = NS_SUCCEEDED(info->Flags(&flags));
  bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);

  int32_t offset;
  ok &= NS_SUCCEEDED(info->Offset(&offset));

  int32_t size;
  ok &= NS_SUCCEEDED(info->Size(&size));

  int64_t presentationTimeUs;
  ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

  if (!ok) {
    return;
  }

  if (size > 0) {
    if ((flags & java::sdk::MediaCodec::BUFFER_FLAG_CODEC_CONFIG) != 0) {
      // Keep SPS/PPS around; key frames may need it prepended (H.264) or as
      // avcC extradata depending on usage.
      mConfigData = ExtractCodecConfig(aBuffer, offset, size,
                                       mConfig.mUsage == Usage::Realtime);
      return;
    }
    RefPtr<MediaRawData> output = GetOutputData(
        aBuffer, offset, size,
        !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
    output->mEOS = isEOS;
    output->mTime = media::TimeUnit::FromMicroseconds(presentationTimeUs);
    mEncodedData.AppendElement(std::move(output));
  }

  if (isEOS) {
    mDrainState = DrainState::DRAINED;
  }
  if (!mDrainPromise.IsEmpty()) {
    EncodedData pending = std::move(mEncodedData);
    mDrainPromise.Resolve(std::move(pending), __func__);
  }
}

// Generic output path: plain copy of the encoded frame out of the Java buffer.
template <typename ConfigType>
RefPtr<MediaRawData> AndroidDataEncoder<ConfigType>::GetOutputData(
    java::SampleBuffer::Param aBuffer, const int32_t aOffset,
    const int32_t aSize, const bool aIsKeyFrame) {
  // Copy frame data from Java buffer.
  auto output = MakeRefPtr<MediaRawData>();
  UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
  if (!writer->SetSize(aSize)) {
    AND_ENC_LOGE("fail to allocate output buffer");
    return nullptr;
  }

  jni::ByteBuffer::LocalRef buf = jni::ByteBuffer::New(writer->Data(), aSize);
  aBuffer->WriteToByteBuffer(buf, aOffset, aSize);
  output->mKeyframe = aIsKeyFrame;

  return output;
}

// AVC/H.264 frame can be in avcC or Annex B and needs extra conversion steps.
// Realtime usage emits Annex B with SPS/PPS prepended to key frames; other
// usages convert the sample to avcC with the stored extradata.
template <>
RefPtr<MediaRawData>
AndroidDataEncoder<MediaDataEncoder::H264Config>::GetOutputData(
    java::SampleBuffer::Param aBuffer, const int32_t aOffset,
    const int32_t aSize, const bool aIsKeyFrame) {
  auto output = MakeRefPtr<MediaRawData>();

  size_t prependSize = 0;
  RefPtr<MediaByteBuffer> avccHeader;
  if (aIsKeyFrame && mConfigData) {
    if (mConfig.mUsage == Usage::Realtime) {
      prependSize = mConfigData->Length();
    } else {
      avccHeader = mConfigData;
    }
  }

  UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
  if (!writer->SetSize(prependSize + aSize)) {
    AND_ENC_LOGE("fail to allocate output buffer");
    return nullptr;
  }

  if (prependSize > 0) {
    PodCopy(writer->Data(), mConfigData->Elements(), prependSize);
  }

  jni::ByteBuffer::LocalRef buf =
      jni::ByteBuffer::New(writer->Data() + prependSize, aSize);
  aBuffer->WriteToByteBuffer(buf, aOffset, aSize);

  if (mConfig.mUsage != Usage::Realtime &&
      !AnnexB::ConvertSampleToAVCC(output, avccHeader)) {
    AND_ENC_LOGE("fail to convert annex-b sample to AVCC");
    return nullptr;
  }

  output->mKeyframe = aIsKeyFrame;

  return output;
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::Drain() {
  return InvokeAsync(mTaskQueue, this, __func__,
                     &AndroidDataEncoder::ProcessDrain);
}

// Task-queue half of Drain(): send EOS once, then either hand back buffered
// output or park a promise until ProcessOutput sees the EOS flag.
template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::ProcessDrain() {
  AssertOnTaskQueue();
  MOZ_ASSERT(mJavaEncoder);
  MOZ_ASSERT(mDrainPromise.IsEmpty());

  REJECT_IF_ERROR();

  switch (mDrainState) {
    case DrainState::DRAINABLE:
      // Queue an empty EOS input buffer to request draining.
      mInputBufferInfo->Set(0, 0, -1,
                            java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
      mJavaEncoder->Input(nullptr, mInputBufferInfo, nullptr);
      mDrainState = DrainState::DRAINING;
      [[fallthrough]];
    case DrainState::DRAINING:
      if (mEncodedData.IsEmpty()) {
        return mDrainPromise.Ensure(__func__);  // Pending promise.
      }
      [[fallthrough]];
    case DrainState::DRAINED:
      if (mEncodedData.Length() > 0) {
        EncodedData pending = std::move(mEncodedData);
        return EncodePromise::CreateAndResolve(std::move(pending), __func__);
      } else {
        return EncodePromise::CreateAndResolve(EncodedData(), __func__);
      }
  }
}

template <typename ConfigType>
RefPtr<ShutdownPromise> AndroidDataEncoder<ConfigType>::Shutdown() {
  return InvokeAsync(mTaskQueue, this, __func__,
                     &AndroidDataEncoder::ProcessShutdown);
}

// Release the Java codec and detach/dispose the native callbacks.
template <typename ConfigType>
RefPtr<ShutdownPromise> AndroidDataEncoder<ConfigType>::ProcessShutdown() {
  AssertOnTaskQueue();
  if (mJavaEncoder) {
    mJavaEncoder->Release();
    mJavaEncoder = nullptr;
  }

  if (mJavaCallbacks) {
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  mFormat = nullptr;

  return ShutdownPromise::CreateAndResolve(true, __func__);
}

template <typename ConfigType>
RefPtr<GenericPromise> AndroidDataEncoder<ConfigType>::SetBitrate(
    const MediaDataEncoder::Rate aBitsPerSec) {
  RefPtr<AndroidDataEncoder> self(this);
  return InvokeAsync(mTaskQueue, __func__, [self, aBitsPerSec]() {
    self->mJavaEncoder->SetBitrate(aBitsPerSec);
    return GenericPromise::CreateAndResolve(true, __func__);
  });
  // Dead `return nullptr;` after the unconditional return above was removed.
}

// Record an asynchronous error (bounced to the task queue if needed); it is
// surfaced to callers by REJECT_IF_ERROR() on the next encode/drain.
template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::Error(const MediaResult& aError) {
  if (!mTaskQueue->IsCurrentThreadIn()) {
    nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
        "AndroidDataEncoder::Error", this, &AndroidDataEncoder::Error, aError));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnTaskQueue();

  mError = Some(aError);
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleInput(
    int64_t aTimestamp, bool aProcessed) {}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleOutput(
    java::Sample::Param aSample, java::SampleBuffer::Param aBuffer) {
  // mMutex guards mEncoder against a concurrent Cancel() during shutdown.
  MutexAutoLock lock(mMutex);
  if (mEncoder) {
    mEncoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
  }
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::
    HandleOutputFormatChanged(java::sdk::MediaFormat::Param aFormat) {}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleError(
    const MediaResult& aError) {
  MutexAutoLock lock(mMutex);
  if (mEncoder) {
    mEncoder->Error(aError);
  }
}

// Force compiler to generate code.
// NOTE(review): instantiation arguments were stripped in the extracted copy;
// restored from the codec types handled above — confirm against the header.
template class AndroidDataEncoder<MediaDataEncoder::H264Config>;
template class AndroidDataEncoder<MediaDataEncoder::VP8Config>;
template class AndroidDataEncoder<MediaDataEncoder::VP9Config>;

}  // namespace mozilla

#undef AND_ENC_LOG
#undef AND_ENC_LOGE